Skip to content
This repository has been archived by the owner on Jan 21, 2021. It is now read-only.

Commit

Permalink
Merge pull request #440 from OpenSRP/master_postgres_support
Browse files Browse the repository at this point in the history
Master postgres support
  • Loading branch information
Ephraim (Keyman) Muhia authored May 7, 2018
2 parents bbc5e13 + 07ede16 commit 08ec4b4
Show file tree
Hide file tree
Showing 306 changed files with 33,771 additions and 2,415 deletions.
8 changes: 8 additions & 0 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -3,13 +3,21 @@ sudo: required
services:
- couchdb
- mysql
addons:
postgresql: "9.6"
install:
- echo "skip" # or anything that returns 0 status.
- sudo updatedb #Giving couchdb time to start.
- sudo netstat -tuplen # listing all the port for debug purpose.
before_script:
- curl -X PUT http://localhost:5984/_config/admins/rootuser -d '"adminpass"'
- echo "USE mysql;\nUPDATE user SET password=PASSWORD('VA1913wm') WHERE user='root';\nFLUSH PRIVILEGES;\n" | mysql -u root
- psql -c "CREATE ROLE opensrp_admin WITH LOGIN SUPERUSER PASSWORD 'admin';" -U postgres
- psql -c "CREATE DATABASE opensrp_test WITH OWNER = opensrp_admin;" -U postgres
- chmod +x assets/migrations/run_mybatis_migrations.sh
- sudo ./assets/migrations/run_mybatis_migrations.sh
- sudo mkdir -p /opt/multimedia
- sudo chown -R travis:travis /opt/multimedia
script:
- javac -version
- java -version
Expand Down
4 changes: 3 additions & 1 deletion assets/config/opensrp.properties
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@ js.directory.name=/ziggy
form.directory.name=/form
multimedia.directory.name=/opt/multimedia
form.download.files=form.xml, model.xml, form_definition.json
multimedia.directory.name=../multimedia/opensrp
qrcodes.directory.name=/home/opensrp/qr-codes/

schedule.config.path=/schedules/schedule-config.xls
Expand Down Expand Up @@ -88,3 +87,6 @@ file.maxUploadSize=20971520

#opnenSRP Site url
opensrp.site.url=""

#search for missing clients
opensrp.sync.search.missing.client=false
38 changes: 38 additions & 0 deletions assets/migrations/README
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
Welcome!

This is an MyBatis Migration repository. You can specify the repository
directory when running migrations using the --path=<repos-directory>
option. The default path is the current working directory ("./").

The repository base directory contains three subdirectories as follows:

./drivers

Place your JDBC driver .jar or .zip files in this directory. Upon running a
migration, the drivers will be dynamically loaded.

./environments

In the environments folder you will find .properties files that represent
your database instances. By default a development.properties file is
created for you to configure your development time database properties.
You can also create test.properties and production.properties files.
The environment can be specified when running a migration by using
the --env=<environment> option (without the path or ".properties" part).

The default environment is "development".

./scripts

This directory contains your migration SQL files. These are the files
that contain your DDL to both upgrade and downgrade your database
structure. By default, the directory will contain the script to
create the changelog table, plus one empty "first" migration script.
To create a new migration script, use the "new" command. To run
all pending migrations, use the "up" command. To undo the last
migration applied, use the "down" command etc.

For more information about commands and options, run the MyBatis
Migration script with the --help option.

Enjoy.
42 changes: 42 additions & 0 deletions assets/migrations/data_migration_scripts/migrate_actions.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
--clear data before performing the migration, so the script can be re-run safely
TRUNCATE TABLE core.action CASCADE;

-- reset identity sequences so migrated rows get ids starting from 1
ALTER SEQUENCE core.action_id_seq RESTART WITH 1;

ALTER SEQUENCE core.action_metadata_id_seq RESTART WITH 1;

/* Insert into action: copy every CouchDB document of type 'Action' verbatim */
insert into core.action(json)
select doc as json from couchdb
where doc->>'type'='Action';

/* Insert into action metadata: extract the searchable fields from the migrated JSON.
 * server_version is taken from the Action document's 'timeStamp' field. */
INSERT INTO core.action_metadata(action_id, document_id, base_entity_id, server_version, provider_id, location_id, team, team_id)
select id as action_id,
       json->>'_id' as document_id, -- fixed alias: was "jsonument_id", a doc->json find/replace artifact
       json->>'baseEntityId' as base_entity_id,
       (json->>'timeStamp')::BIGINT as server_version,
       json->>'providerId' as provider_id,
       json->>'locationId' as location_id,
       json->>'team' as team,
       json->>'teamId' as team_id
from core.action;


/* In case of a very large dataset, or in low-memory conditions, use the cursor
 * below to insert into action_metadata row by row instead of the set-based
 * INSERT above. Replicate the same cursor pattern for the other document
 * types' metadata scripts.
 *
DO $$
DECLARE
    actions_cursor CURSOR FOR SELECT * FROM core.action;
    t_action RECORD;
BEGIN
    OPEN actions_cursor;
    LOOP
        FETCH actions_cursor INTO t_action;
        EXIT WHEN NOT FOUND;
        INSERT INTO core.action_metadata(action_id, document_id, base_entity_id, server_version, provider_id, location_id, team, team_id)
        VALUES (t_action.id, t_action.json->>'_id', t_action.json->>'baseEntityId', (t_action.json->>'timeStamp')::BIGINT,
                t_action.json->>'providerId', t_action.json->>'locationId', t_action.json->>'team', t_action.json->>'teamId');
    END LOOP;
    CLOSE actions_cursor;
END$$;
*/
22 changes: 22 additions & 0 deletions assets/migrations/data_migration_scripts/migrate_alerts.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
-- Empty the target tables first so the migration can be re-run safely.
TRUNCATE TABLE core.alert CASCADE;

-- Reset identity sequences so migrated rows get ids starting from 1.
ALTER SEQUENCE core.alert_id_seq RESTART WITH 1;

ALTER SEQUENCE core.alert_metadata_id_seq RESTART WITH 1;

/* Copy every CouchDB document of type 'Alert' into core.alert as-is. */
INSERT INTO core.alert (json)
SELECT doc
FROM couchdb
WHERE doc ->> 'type' = 'Alert';

/* Extract the searchable fields of each migrated alert into core.alert_metadata.
 * server_version is taken from the Alert document's 'timeStamp' field. */
INSERT INTO core.alert_metadata
    (alert_id, document_id, base_entity_id, server_version, provider_id,
     location_id, team, team_id, is_active, trigger_name)
SELECT id,
       json ->> '_id',
       json ->> 'entityId',
       (json ->> 'timeStamp')::BIGINT,
       json ->> 'providerId',
       json ->> 'locationId',
       json ->> 'team',
       json ->> 'teamId',
       (json ->> 'isActive')::bool,
       json ->> 'triggerName'
FROM core.alert;
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
-- Empty the target table first so the migration can be re-run safely.
TRUNCATE TABLE core.app_state_token;

-- Reset the identity sequence so migrated rows get ids starting from 1.
ALTER SEQUENCE core.app_state_token_id_seq RESTART WITH 1;

/* Migrate every CouchDB document of type 'AppStateToken' into core.app_state_token. */
INSERT INTO core.app_state_token (name, description, value, last_edited_date)
SELECT doc ->> 'name',
       doc ->> 'description',
       doc ->> 'value',
       (doc ->> 'lastEditDate')::BIGINT
FROM couchdb
WHERE doc ->> 'type' = 'AppStateToken';
22 changes: 22 additions & 0 deletions assets/migrations/data_migration_scripts/migrate_clients.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
--clear data before performing the migration, so the script can be re-run safely
TRUNCATE TABLE core.client CASCADE;

-- reset identity sequences so migrated rows get ids starting from 1
ALTER SEQUENCE core.client_id_seq RESTART WITH 1;

ALTER SEQUENCE core.client_metadata_id_seq RESTART WITH 1;

/* Insert into client: copy every CouchDB document of type 'Client' verbatim,
 * carrying over the voided timestamp as date_deleted */
insert into core.client(json,date_deleted)
select doc as json, (doc->>'dateVoided')::TIMESTAMP as date_deleted from couchdb
where doc->>'type'='Client';

/* Insert into client metadata: extract the searchable fields from the migrated JSON */
INSERT INTO core.client_metadata (client_id, document_id, base_entity_id, relational_id, server_version,
 openmrs_uuid, unique_id, first_name, middle_name, last_name, birth_date, date_deleted)
select id as client_id,
       json->>'_id' as document_id,
       json->>'baseEntityId' as base_entity_id,
       json->'relationships'->'mother'->>0 as relational_id, -- first listed mother relationship, if any
       (json->>'serverVersion')::BIGINT as server_version,
       json->'identifiers'->>'OPENMRS_UUID' as openmrs_uuid,
       -- child ZEIR id takes precedence over the mother's M_ZEIR_ID
       coalesce(json->'identifiers'->>'ZEIR_ID', json->'identifiers'->>'M_ZEIR_ID') as unique_id,
       json->>'firstName' as first_name,
       -- fixed key case: ->> is case-sensitive and the sibling name fields use
       -- lowerCamelCase, so 'MiddleName' always extracted NULL
       json->>'middleName' as middle_name,
       json->>'lastName' as last_name,
       (json->>'birthdate')::date as birth_date,
       (json->>'dateVoided')::TIMESTAMP as date_deleted
from core.client;
15 changes: 15 additions & 0 deletions assets/migrations/data_migration_scripts/migrate_error_trace.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
-- Empty the target table first so the migration can be re-run safely.
TRUNCATE TABLE error.error_trace;

-- Reset the identity sequence so migrated rows get ids starting from 1.
ALTER SEQUENCE error.error_trace_id_seq RESTART WITH 1;

/* Migrate every CouchDB document of type 'ErrorTrace' into error.error_trace. */
INSERT INTO error.error_trace
    (document_id, date_occurred, error_type, occurred_at, stack_trace, status, record_id,
     date_closed, document_type, retry_url)
SELECT doc ->> '_id',
       (doc ->> 'dateOccurred')::timestamp,
       doc ->> 'errorType',
       doc ->> 'occurredAt',
       doc ->> 'stackTrace',
       doc ->> 'status',
       doc ->> 'recordId',
       (doc ->> 'dateClosed')::timestamp,
       doc ->> 'documentType',
       doc ->> 'retryUrl'
FROM couchdb
WHERE doc ->> 'type' = 'ErrorTrace';
71 changes: 71 additions & 0 deletions assets/migrations/data_migration_scripts/migrate_events.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,71 @@
--clear data before performing the migration, so the script can be re-run safely
TRUNCATE TABLE core.event CASCADE;

-- reset identity sequences so migrated rows get ids starting from 1
ALTER SEQUENCE core.event_id_seq RESTART WITH 1;

ALTER SEQUENCE core.event_metadata_id_seq RESTART WITH 1;

/*Query to show constraints on the metadata table:
SELECT CONSTRAINT_name FROM information_schema.CONSTRAINT_TABLE_usage WHERE TABLE_name = 'event_metadata';
*/

/* Disable the form_submission_id unique constraint while loading:
 * the CouchDB data may contain duplicate form submissions, which are
 * removed below before the constraint is restored. */
ALTER TABLE core.event_metadata DROP CONSTRAINT IF EXISTS event_metadata_form_submission_id_key ;

/* Insert into event: copy every CouchDB document of type 'Event' verbatim,
 * carrying over the voided timestamp as date_deleted */
insert into core.event(json,date_deleted)
select doc as json, (doc->>'dateVoided')::TIMESTAMP as date_deleted from couchdb
where doc->>'type'='Event';

/* Insert into event metadata: extract the searchable fields from the migrated JSON */
INSERT INTO core.event_metadata
( event_id, document_id, base_entity_id, form_submission_id, server_version,
openmrs_uuid, event_type, event_date, entity_type, provider_id, location_id,
team, team_id, date_created, date_edited, date_deleted)
select id as event_id,json->>'_id' as document_id,
json->>'baseEntityId' as base_entity_id,json->>'formSubmissionId' as form_submission_id,
(json->>'serverVersion')::BIGINT as server_version,json->'identifiers'->>'OPENMRS_UUID' as openmrs_uuid,
json->>'eventType' as event_type,(json->>'eventDate')::TIMESTAMP as event_date,
json->>'entityType' as entity_type,json->>'providerId' as provider_id,json->>'locationId' as location_id,
json->>'team' as team, json->>'teamId' as team_id,(json->>'dateCreated')::TIMESTAMP as date_created,
(json->>'dateEdited')::TIMESTAMP as date_edited,(json->>'dateVoided')::TIMESTAMP as date_deleted
from core.event;

/* create an index to help identify duplicates faster */
CREATE INDEX event_metadata_duplicate_index on core.event_metadata(form_submission_id,server_version);

/* Delete duplicates in event and event_metadata, keeping only the row with the
 * highest server_version per form_submission_id.
 * Cascade (fk) triggers are disabled for better performance (faster delete).
 * NOTE(review): if two duplicates share the same max server_version, both
 * survive and the ADD UNIQUE below will fail — confirm against the data. */
DO $$
DECLARE duplicate_events_cursor CURSOR FOR
		select e.event_id,e.id from core.event_metadata e
		JOIN (
		    SELECT
		        form_submission_id,
		        max(server_version) server_version
		    FROM core.event_metadata
		    GROUP BY form_submission_id
		    HAVING count(*) > 1
		) d on e.form_submission_id=d.form_submission_id and e.server_version<d.server_version;
	rec RECORD;
BEGIN
	ALTER TABLE core.event_metadata DISABLE TRIGGER ALL;
	ALTER TABLE core.event DISABLE TRIGGER ALL;
	OPEN duplicate_events_cursor;
	LOOP
		FETCH duplicate_events_cursor into rec;
		EXIT WHEN NOT FOUND;
		delete from core.event_metadata where id =rec.id;
		delete from core.event where id =rec.event_id;
	END LOOP;
	CLOSE duplicate_events_cursor;
	ALTER TABLE core.event_metadata ENABLE TRIGGER ALL;
	ALTER TABLE core.event ENABLE TRIGGER ALL;
END;
$$; -- fixed: the DO statement must end with ';' — without it psql folds the following statements into the DO and errors

/*drop the duplicates index*/
DROP INDEX core.event_metadata_duplicate_index;

/*Re-enable the form_submission_id unique constraint*/
ALTER TABLE core.event_metadata ADD UNIQUE (form_submission_id);
13 changes: 13 additions & 0 deletions assets/migrations/data_migration_scripts/migrate_multimedia.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
--clear data before performing the migration, so the script can be re-run safely
TRUNCATE TABLE core.multi_media;

-- reset the identity sequence so migrated rows get ids starting from 1
ALTER SEQUENCE core.multi_media_id_seq RESTART WITH 1;

/* Insert into multi_media: migrate every CouchDB document of type 'Multimedia' */
INSERT INTO core.multi_media
(document_id, case_id, provider_id, content_type, file_path, file_category)
select doc->>'_id' as document_id,
       doc->>'caseId' as case_id, -- fixed alias: was mislabelled "base_entity_id", target column is case_id
       doc->>'providerId' as provider_id,
       doc->>'contentType' as content_type,
       doc->>'filePath' as file_path,
       doc->>'fileCategory' as file_category
from couchdb
where doc->>'type'='Multimedia';
25 changes: 25 additions & 0 deletions assets/migrations/data_migration_scripts/migrate_reports.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
--clear data before performing the migration, so the script can be re-run safely
TRUNCATE TABLE core.report CASCADE;

-- reset identity sequences so migrated rows get ids starting from 1
ALTER SEQUENCE core.report_id_seq RESTART WITH 1;

ALTER SEQUENCE core.report_metadata_id_seq RESTART WITH 1;

/* Insert into report: copy every CouchDB document of type 'Report' verbatim */
insert into core.report(json)
select doc as json from couchdb
where doc->>'type'='Report';

/* Insert into report metadata: extract the searchable fields from the migrated JSON.
 * NOTE(review): date_edited is cast to ::date here, while migrate_events casts
 * the same 'dateEdited' field to ::TIMESTAMP — confirm whether the time-of-day
 * truncation is intentional for reports. */
INSERT INTO core.report_metadata
(report_id, document_id,form_submission_id, base_entity_id,server_version,report_type,report_date, provider_id,
location_id,team,team_id,date_edited)
select id as report_id,json->>'_id' document_id,
json->>'formSubmissionId' as form_submission_id,json->>'baseEntityId' as base_entity_id,
(json->>'serverVersion')::BIGINT as server_version,
json->>'reportType' as report_type,(json->>'reportDate')::date as report_date,
json->>'providerId' as provider_id,json->>'locationId' as location_id,
json->>'team' as team, json->>'teamId' as team_id,(json->>'dateEdited')::date as date_edited
from core.report;
18 changes: 18 additions & 0 deletions assets/migrations/data_migration_scripts/migrate_stock.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
--clear data before performing the migration, so the script can be re-run safely
TRUNCATE TABLE core.stock CASCADE;

-- reset identity sequences so migrated rows get ids starting from 1
ALTER SEQUENCE core.stock_id_seq RESTART WITH 1;

ALTER SEQUENCE core.stock_metadata_id_seq RESTART WITH 1;

/* Insert into stock: copy every CouchDB document of type 'Stock' verbatim */
insert into core.stock(json)
select doc as json from couchdb
where doc->>'type'='Stock';

/* Insert into stock metadata: extract the searchable fields from the migrated JSON.
 * NOTE(review): 'providerid' is all-lowercase here while every other migration
 * script uses 'providerId'; ->> is case-sensitive, so if the Stock document
 * actually stores 'providerId' this extracts NULL — verify against the Stock
 * document schema before changing. */
INSERT INTO core.stock_metadata(stock_id, document_id, server_version, provider_id,location_id,team,team_id)
select id as stock_id,json->>'_id' document_id,
(json->>'serverVersion')::BIGINT as server_version,json->>'providerid' as provider_id,json->>'locationId' as location_id,
json->>'team' as team, json->>'teamId' as team_id
from core.stock;
Binary file added assets/migrations/drivers/postgresql-42.2.1.jar
Binary file not shown.
Loading

0 comments on commit 08ec4b4

Please sign in to comment.