
Code

https://github.com/apache/incubator-eagle/tree/master/eagle-core/eagle-query/eagle-storage-jdbc

Configuration

  • eagle.storage.type: jdbc
  • eagle.storage.adapter: mysql | oracle | postgres | mssql | hsqldb | derby
  • eagle.storage.username
  • eagle.storage.password
  • eagle.storage.database
  • eagle.storage.connection.url
  • eagle.storage.connection.props
  • eagle.storage.driver.class
  • eagle.storage.connection.max

Sample:

eagle.storage.type=jdbc
eagle.storage.adapter=mysql
eagle.storage.username=eagle
eagle.storage.password=eagle
eagle.storage.database=eagle
eagle.storage.connection.url=jdbc:mysql://localhost:3306/eagle
eagle.storage.connection.props=encoding=UTF-8
eagle.storage.driver.class=com.mysql.jdbc.Driver
eagle.storage.connection.max=8
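
To sanity-check these settings outside of Eagle, a plain JDBC connection can be opened with the same url, username, password and driver class. The snippet below is only an illustration of that check, not Eagle's own bootstrap code; appending connection.props to the URL as ?encoding=UTF-8 is an assumption made for the example.

import java.sql.Connection;
import java.sql.DriverManager;

public class JdbcConfigCheck {
    public static void main(String[] args) throws Exception {
        // Values taken from the sample eagle.storage.* settings above.
        String url = "jdbc:mysql://localhost:3306/eagle?encoding=UTF-8";
        String user = "eagle";
        String password = "eagle";

        // Load the driver class configured via eagle.storage.driver.class.
        Class.forName("com.mysql.jdbc.Driver");

        // Open a connection and report which database we actually reached.
        try (Connection conn = DriverManager.getConnection(url, user, password)) {
            System.out.println("Connected to: "
                    + conn.getMetaData().getDatabaseProductName() + " "
                    + conn.getMetaData().getDatabaseProductVersion());
        }
    }
}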

Rowkey

We simply use a UUID as the row key of each entity and map that key to a primary key column named "uuid" in the RDBMS table schema.
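
As an illustration of the idea (not the exact code path inside the storage extension), a row key can be generated with java.util.UUID and bound to the uuid primary-key column, for example against the alertstream_alertstream table defined in the DDL below.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.util.UUID;

public class UuidRowKeyExample {
    // Insert one row using a random UUID as the "uuid" primary key.
    static void insertWithUuid(Connection conn) throws Exception {
        String uuid = UUID.randomUUID().toString();
        String sql = "insert into alertstream_alertstream"
                + "(uuid, timestamp, datasource, streamname, description) values (?, ?, ?, ?, ?)";
        try (PreparedStatement ps = conn.prepareStatement(sql)) {
            ps.setString(1, uuid);                      // row key / primary key
            ps.setLong(2, System.currentTimeMillis());  // entity timestamp
            ps.setString(3, "testsource");
            ps.setString(4, "teststream");
            ps.setString(5, "testdesc");
            ps.executeUpdate();
        }
    }
}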

Features

  • Support basic entity operations: CREATE, READ, UPDATE and DELETE
  • Support flat aggregation queries
  • Support customized entity field types via JdbcEntityDefinitionManager#registerJdbcSerDeser (see the sketch below)
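
The exact JdbcSerDeser contract and the signature of JdbcEntityDefinitionManager#registerJdbcSerDeser are defined in eagle-storage-jdbc; the sketch below only illustrates the general idea with a hypothetical FieldSerDeser interface, flattening a List<String> field into a varchar column and parsing it back.

import java.util.Arrays;
import java.util.List;

// Hypothetical interface for illustration only; the real contract is the
// JdbcSerDeser type registered via JdbcEntityDefinitionManager#registerJdbcSerDeser.
interface FieldSerDeser<T> {
    String serialize(T value);      // entity field -> varchar column value
    T deserialize(String column);   // varchar column value -> entity field
}

// Example: store a List<String> field as a comma-separated varchar.
class StringListSerDeser implements FieldSerDeser<List<String>> {
    @Override
    public String serialize(List<String> value) {
        return String.join(",", value);
    }

    @Override
    public List<String> deserialize(String column) {
        return Arrays.asList(column.split(","));
    }
}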

Dependencies

 

Table DDL

Alert framework related entity

-- alertdatasource
drop table alertdatasource_alertdatasource;
create table alertdatasource_alertdatasource(uuid varchar(100), timestamp bigint, enabled bool, config varchar(100), description varchar(100), site varchar(10), datasource varchar(10));
insert into alertdatasource_alertdatasource (uuid, timestamp, enabled, config, description, site, datasource) values('uuid-1', 0, 1, 'testconfig', 'testdesc', 'testsite', 'testsource');
 
-- alertstream
drop table alertstream_alertstream;
create table alertstream_alertstream(uuid varchar(100), timestamp bigint, datasource varchar(100), streamname varchar(100), description varchar(200));
insert into alertstream_alertstream(uuid, timestamp, datasource, streamname, description) values('uuid-1', 0, 'testsource', 'teststream', 'testdesc');
 
-- alertdef_alertdef
drop table alertdef_alertdef;
create table alertdef_alertdef(uuid varchar(100), timestamp bigint, site varchar(100), datasource varchar(100), alertexecutorid varchar(100), policyid varchar(100), policytype varchar(100), description varchar(200), policydef varchar(1000), dedupedef varchar(1000), notificationdef varchar(1000), remediationdef varchar(1000), enabled bool, owner varchar(100), lastmodifieddate bigint, severity bigint, createdtime bigint);
insert into alertdef_alertdef(uuid, timestamp, site, datasource, alertexecutorid, policyid, policytype, description, policydef, dedupedef, notificationdef, remediationdef, enabled, owner, lastmodifieddate, severity, createdtime) values('uuid-1', 0, 'testsite', 'testsource', 'testalertexecutorid', 'testpolicyid', 'siddhiCEPEngine', 'testdescription', 'testpolicydef', '', '', '', true, 'testowner', 0, 0, 0);
 
-- alertexecutor
drop table alertexecutor_alertexecutor;
create table alertexecutor_alertexecutor(uuid varchar(100), timestamp bigint, datasource varchar(100), alertexecutorid varchar(100), streamname varchar(100), description varchar(100));
insert into alertexecutor_alertexecutor(uuid, timestamp, datasource, alertexecutorid, streamname, description) values ('uuid-1', 0, 'testsource', 'testalertexecutorid', 'teststreamname', 'testdesc');
 
-- alertdetail
drop table alertdetail_hadoop;
create table alertdetail_hadoop(uuid varchar(100), timestamp bigint, site varchar(100), datasource varchar(100), hostname varchar(100), policyid varchar(100), alertsource varchar(100), sourcestreams varchar(100),  alertexecutorid varchar(100), description varchar(100), remediationid varchar(100), remediationcallback varchar(100), alertcontext varchar(1000), streamid varchar(100));
insert into alertdetail_hadoop(uuid, timestamp, site, datasource, hostname, policyid, alertsource, sourcestreams, alertexecutorid, description, remediationid, remediationcallback, alertcontext, streamid) values('uuid-1', 0, 'testsite', 'testsource', 'testhost', 'testpolicyid', 'testalertsource', 'testsourcestreams', 'testalertexecutorid', 'testdesc', '', '', '', 'teststreamid');
 
-- alertstreamschema
drop table alertstreamschema_alertstreamschema;
create table alertstreamschema_alertstreamschema(uuid varchar(100), timestamp bigint, datasource varchar(100), streamname varchar(100), attrname varchar(100), attrtype varchar(20), category varchar(20), attrValueResolver varchar(100), usedastag bool, attrdescription varchar(100), attrdisplayname varchar(100), defaultvalue varchar(100));
insert into alertstreamschema_alertstreamschema(uuid, timestamp, datasource, streamname, attrname, attrtype, category, attrValueResolver, usedastag, attrdescription, attrdisplayname, defaultvalue) values('uuid-1', 0, 'testsource', 'teststream', 'testattrname', 'testattrtype', 'testcategory', 'testattrvalueresolver', true, 'testattrdescription', 'testattrdisplayname', 'testdefaultvalue');

HdfsAudit/HBase/Hive monitoring related tables

-- filesensitivity
drop table filesensitivity_filesensitivity;
create table filesensitivity_filesensitivity(uuid varchar(100), timestamp bigint, site varchar(20), filedir varchar(100), sensitivitytype varchar(20));
insert into filesensitivity_filesensitivity (uuid, timestamp, site, filedir, sensitivitytype) values('uuid-1', 0, 'testsite', 'testfiledir' , 'testsensitivitytype');

-- hiveresourcesensitivity
drop table hiveresourcesensitivity_hiveresourcesensitivity;
create table hiveresourcesensitivity_hiveresourcesensitivity(uuid varchar(100), timestamp bigint, site varchar(20), hiveresource varchar(100), sensitivitytype varchar(20));
insert into hiveresourcesensitivity_hiveresourcesensitivity (uuid, timestamp, site, hiveresource, sensitivitytype) values('uuid-1', 0, 'testsite', 'testhiveresource' , 'testsensitivitytype');
 
-- hbaseresourcesensitivity
drop table hbaseresourcesensitivity_hbaseresourcesensitivity;
create table hbaseresourcesensitivity_hbaseresourcesensitivity (uuid varchar(100), timestamp bigint, site varchar(20), hbaseresource varchar(100), sensitivitytype varchar(20));
insert into hbaseresourcesensitivity_hbaseresourcesensitivity (uuid, timestamp, site, hbaseresource, sensitivitytype) values('uuid-1', 0, 'testsite', 'testhbaseresource' , 'testsensitivitytype');

Machine learning related tables

-- mlmodel
drop table mlmodel_mlmodel;
create table mlmodel_mlmodel(uuid varchar(100), timestamp bigint, site varchar(20), user varchar(20), algorithm varchar(100), content varchar(2000), version bigint);
insert into mlmodel_mlmodel(uuid, timestamp, site, user, algorithm, content, version) values('uuid-1',0,'testsite','testuser','testalgorithm','testcontent',0);
 
-- userprofile_schedule_command
drop table userprofile_schedule_command;
create table userprofile_schedule_command(uuid varchar(100), timestamp bigint, site varchar(20), type varchar(20), status varchar(20), updatetime bigint, detail varchar(200));
insert into userprofile_schedule_command(uuid, timestamp, site, type, status, updatetime, detail) values('uuid-1',0,'testsite','testtype','teststatus',0,'testdetail');

TO-DO

  • Support time-series-based aggregation
  • Investigate why write performance degrades as the record count in a table increases
  • Implement batch insert in JdbcEntityWriterImpl (see the sketch after this list)
  • Implement DDL management to generate default table schema DDL according to the entity definition
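
A possible starting point for the batch-insert item is the standard JDBC batching API (PreparedStatement#addBatch / #executeBatch). The sketch below shows that general pattern only, not the actual JdbcEntityWriterImpl change.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.util.List;
import java.util.UUID;

public class BatchInsertSketch {
    // Insert many rows in one round trip instead of one statement per entity.
    static void batchInsert(Connection conn, List<String> descriptions) throws Exception {
        String sql = "insert into alertstream_alertstream"
                + "(uuid, timestamp, datasource, streamname, description) values (?, ?, ?, ?, ?)";
        boolean previousAutoCommit = conn.getAutoCommit();
        conn.setAutoCommit(false);
        try (PreparedStatement ps = conn.prepareStatement(sql)) {
            for (String description : descriptions) {
                ps.setString(1, UUID.randomUUID().toString());
                ps.setLong(2, System.currentTimeMillis());
                ps.setString(3, "testsource");
                ps.setString(4, "teststream");
                ps.setString(5, description);
                ps.addBatch();               // queue the row
            }
            ps.executeBatch();               // flush all queued rows at once
            conn.commit();
        } finally {
            conn.setAutoCommit(previousAutoCommit);
        }
    }
}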