Tuesday, December 13, 2016

INSERT ALL and sequence NextVal

drop table t1 purge;
drop table t2 purge;
drop table t3 purge;

create table t1(n1 number, n2 number);
create table t2(n1 number, n2 number);
create table t3(n1 number, n2 number);

create sequence s1;

Problem:

INSERT ALL
    INTO t1(n1, n2) values(n1_seq, nn)
    INTO t2(n1, n2) values(n1_seq, nn) 
select s1.nextval n1_seq, rownum*10 nn from dual connect by level <= 2;

Error report -
SQL Error: ORA-02287: sequence number not allowed here
02287. 00000 -  "sequence number not allowed here"
*Cause:    The specified sequence number (CURRVAL or NEXTVAL) is inappropriate
           here in the statement.
*Action:   Remove the sequence number.

ORA-02287 is raised here because the sequence is referenced in the subquery of the multitable insert, which is not allowed.

Fix: move the sequence references into the VALUES clauses (NEXTVAL in the first INTO clause, CURRVAL in the others):

INSERT ALL
    INTO t1(n1, n2) values(s1.nextval, nn)
    INTO t2(n1, n2) values(s1.currval, nn) 
    INTO t3(n1, n2) values(s1.currval, nn) 
select rownum*10 nn from dual connect by level <= 2;

select * from t1;
select * from t2;
select * from t3;
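
To sanity-check the fix: NEXTVAL should be incremented only once per row returned by the subquery, and CURRVAL in the other INTO clauses should pick up that same value, so all three tables should end up with matching keys. A quick verification sketch, using the tables created above:

select t1.n1, t1.n2, t2.n2 t2_n2, t3.n2 t3_n2
  from t1
  join t2 on t2.n1 = t1.n1
  join t3 on t3.n1 = t1.n1;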

Friday, October 14, 2016

When does ANSI join come into play?

ANSI join syntax has been around for many years.
There are a couple of situations where you are better off using ANSI joins rather than Oracle's native join syntax.

1. Join many tables.

Reason:

The reason to use explicit JOINs rather than implicit ones (regardless of whether they are outer joins or not) is that it is much easier to accidentally create a Cartesian product with implicit joins. With explicit JOINs you cannot create one "by accident". The more tables are involved, the higher the risk that you miss one join condition (a contrast illustrating this is sketched after Example 1 below).

2. Outer join.

Reason:

Basically, (+) is severely limited compared to ANSI joins. The ANSI syntax is also cleaner: with the native syntax, forgetting the (+) on just one column of a multi-column outer join silently turns it into a normal (inner) join, which cannot happen with an explicit OUTER JOIN (see Example 3 below).
In the past there were some bugs with the ANSI syntax, but if you are on the latest 11.2 or 12.1 those should already be fixed.


Example 1:

SELECT *
  FROM table_a a 
  JOIN table_b b 
    ON a.col1 = b.col1
  JOIN table_c c
    ON b.col2 = c.col2;
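
For contrast, the same three tables in the implicit (comma) syntax: if the join to table_c is accidentally left out, the statement still parses and runs, silently producing a Cartesian product (a hypothetical mistake, shown only to illustrate the risk):

SELECT *
  FROM table_a a, table_b b, table_c c
 WHERE a.col1 = b.col1;
-- the b/c join condition is missing, so every matching a/b row
-- is repeated once per row of table_c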

Example 2:

SELECT d.department_id, e.last_name, j.job_name
  FROM departments d 
  LEFT OUTER JOIN employees e
    ON d.department_id = e.department_id
  LEFT OUTER JOIN job j
    ON e.job_id = j.job_id
;
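
Example 3 (the multi-column pitfall mentioned in reason 2; the tables and columns are made up for illustration):

-- Native syntax: (+) forgotten on col2, so the outer join silently
-- becomes an inner join and unmatched rows of table_a are dropped
SELECT *
  FROM table_a a, table_b b
 WHERE a.col1 = b.col1(+)
   AND a.col2 = b.col2;

-- ANSI syntax: both conditions live in the ON clause,
-- so the outer join is preserved by construction
SELECT *
  FROM table_a a
  LEFT OUTER JOIN table_b b
    ON a.col1 = b.col1
   AND a.col2 = b.col2;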


Wednesday, July 13, 2016

Join 3 tables - Oracle native outer join vs. ANSI outer join

select t1.id t1_id, t2.id t2_id, t3.id t3_id
from t1,t2,t3
where t1.id = t2.id(+)
and t2.id = t3.id(+)
--order by 1
;

     T1_ID      T2_ID      T3_ID
---------- ---------- ----------
         1          1          1
         3                      
         2          2           


select t1.id t1_id, t2.id t2_id, t3.id t3_id
from t1
left join t2 on t1.id = t2.id
left join t3 on t3.id = t2.id
--order by 1
;

     T1_ID      T2_ID      T3_ID
---------- ---------- ----------
         1          1          1
         3                      
         2          2           

Observation: They return the same result.


FYI, the script below initializes the test data.

drop table t1 purge;
drop table t2 purge;
drop table t3 purge;
create table t1 (id number);
create table t2 (id number);
create table t3 (id number);
insert into t1(id) select rownum from dual connect by level <= 3;
insert into t2(id) select rownum from dual connect by level <= 2;
insert into t3(id) select rownum from dual connect by level <= 1;
commit;

Thursday, March 03, 2016

Refactoring PL/SQL Table Function demo

"Refactoring is the process of changing a software system in such a way that it does not alter the external behavior of the code yet improves its internal structure." -- Martin Fowler in Refactoring: Improving The Design Of Existing Code

Here is an example of how I usually build a PL/SQL table function.

Before refactoring, the SQL has to be written in 2 places, which is repetitive.

create or replace PACKAGE demo_pkg
AS
  CURSOR gc_data IS select 1 col1, 2 col2 from dual;
  type data_tab is table of gc_data%RowType;

  FUNCTION f1(p_a number)
    RETURN data_tab PIPELINED;
end;
/


CREATE OR REPLACE PACKAGE BODY demo_pkg
AS
  FUNCTION f1(p_a number)
    RETURN data_tab PIPELINED
  is
  BEGIN
    FOR lr in (select 1 col1, 2 col2 from dual where p_a > 5)
    LOOP
      pipe ROW(lr);
    END LOOP;
    RETURN;
  EXCEPTION
  WHEN OTHERS THEN
    /* Don't forget to clean up here, too! */
    /* Standard error logging mechanism */
    --logger.log_error('Unhandled Exception', l_scope);
    raise;
  END;
end;
/


After refactoring, the SQL (the cursor) only needs to be written once, removing the unnecessary repetition.

create or replace PACKAGE demo_pkg
AS
  CURSOR gc_data (p_a number) IS
    select 1 col1, 2 col2, 3 col3 from dual where p_a > 5;
  type data_tab is table of gc_data%RowType;

  FUNCTION f1(p_a number)
    RETURN data_tab PIPELINED;
end;
/


CREATE OR REPLACE PACKAGE BODY demo_pkg
AS
  FUNCTION f1(p_a number)
    RETURN data_tab PIPELINED
  is
    lr gc_data%rowtype;
  BEGIN
    OPEN gc_data(p_a);
    LOOP
      FETCH gc_data INTO lr;
      EXIT WHEN gc_data%NotFound; -- exit before piping, otherwise the last row is piped twice
      pipe ROW(lr);
    END LOOP;
    close gc_data;
    RETURN;
  EXCEPTION
  WHEN OTHERS THEN
    /* Don't forget to clean up here, too! */
    IF gc_data%ISOPEN THEN
      CLOSE gc_data;
    END IF;
    /* Standard error logging mechanism */
    --logger.log_error('Unhandled Exception', l_scope);
    raise;
  END;
end;
/
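
A quick smoke test of the refactored version (a sketch; given the cursor's predicate p_a > 5, a small argument should return no rows):

select col1, col2, col3 from table(demo_pkg.f1(18));  -- should return a single row: 1, 2, 3
select col1, col2, col3 from table(demo_pkg.f1(3));   -- should return no rows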


Observed:

This makes the output (the query) DRY, but for the input, if we add/update/remove parameters, we still have to update 2 places: the cursor declaration and the function signature.

Reference:

1) http://www.refactoring.com/
2) call example: select col1, col2 from table(demo_pkg.f1(18));

Thursday, February 25, 2016

flex CSV loader

Purpose: Load a CSV file of any structure from Meteor into MongoDB, using the harrison:papa-parse package.

$> meteor create csv
$> cd csv

meteor add harrison:papa-parse
meteor add themeteorchef:bert
meteor add reactive-var
meteor add fortawesome:fontawesome

Then copy the 2 files below into ./csv/

  • csv.html

<head>
  <title>csv</title>
</head>

<body>
  <h1>Welcome to CSV loader!</h1>

  {{> upload}}
</body>

<template name="upload">
  <h4 class="page-header">Upload a CSV of any structure.</h4>

  {{#unless uploading}}
    <input type="file" name="upload_CSV">
  {{else}}
    <p><i class="fa fa-spin fa-refresh"></i> Uploading files...</p>
  {{/unless}}
</template>


  • csv.js

Order = new Mongo.Collection('order');
if (Meteor.isClient) {
  Template.upload.onCreated( () => {
    Template.instance().uploading = new ReactiveVar( false );
  });

  Template.upload.helpers({
    uploading() {
      return Template.instance().uploading.get();
    }
  });

  Template.upload.events({
    'change [name="upload_CSV"]' (event, template ) {
      template.uploading.set( true );  // show the "Uploading files..." state while parsing
      Papa.parse(event.target.files[0], {
        header: true,
        complete(results, file) {
          Meteor.call('parseUpload', results.data, (error, response) => {
            if (error) {
              template.uploading.set(false);  // stop the uploading state on failure too
              Bert.alert(error.reason, 'warning');
            } else {
              template.uploading.set(false);
              Bert.alert('Upload complete!', 'success', 'growl-top-right' );
            }
          });
        }
      });
    }
  });

}
if (Meteor.isServer) {
  Meteor.startup(function () {
    // code to run on server at startup
  });

  Meteor.methods({
    parseUpload(data){
      //check(data, Array);

      for ( let i = 0; i < data.length; i++) {
        let item = data[i],
            exists = Order.findOne({order_no: item.order_no});  // look up by the same field that gets inserted
        if ( (item.order_no === undefined) || !exists ) {
          Order.insert(item);
        } else {
          console.warn( 'Rejected. This item already exists.');
          console.log(exists._id + "," + item.order_no);
        }
      }
    }
  });
}
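
Since Papa.parse is called with header: true, every CSV row is turned into an object keyed by the header names, and parseUpload above expects an order_no column. A hypothetical input file could look like this:

order_no,customer,amount
1001,Alice,25.50
1002,Bob,13.00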

Wednesday, February 24, 2016

Load CSV file in Meteor

Purpose: Demo how to load a CSV file into MongoDB.

$> meteor create load_csv

Then copy the 2 files below into ./load_csv/

load_csv.html

<head>
  <title>load_csv</title>
</head>

<body>
 <h1>Welcome to CSV loader!</h1>
 {{> read_file_orders}}
 {{> order}}
</body>

<template name="read_file_orders">
 <form class="well form-inline">
  <input class="input-file" id="fileInput2" type="file" name="files[]">
  <button type="button" class="btn btn-primary" id="read_orders">Import</button>
  <button type="reset">clear file name</button>
 </form>
</template>

<template name="order">
  <ul>
  {{#each order}}
    <li>{{id}}: {{value}}</li>
  {{/each}}
  </ul>
</template>


load_csv.js

Orders = new Mongo.Collection("orders");
if (Meteor.isClient) {

  Template.read_file_orders.events({
   "click #read_orders" : function(e) {
    var f = document.getElementById('fileInput2').files[0];
    console.log("read file");
    readFile(f, function(content) {
      Meteor.call('upload',content);
    });
   }
  });

  Template.order.helpers({
    'order': function(){
     return Orders.find();
    }
  });

  readFile = function(f,onLoadCallback) {
    //When the file is loaded the callback is called with the contents as a string
    var reader = new FileReader();
    reader.onload = function (e){
      var contents = e.target.result;
      onLoadCallback(contents);
    }
    reader.readAsText(f);
  };
}

if (Meteor.isServer) {
  Meteor.startup(function () {
  // code to run on server at startup

  });
  Meteor.methods({
    upload : function(fileContent) {
     console.log("start insert");
     import_file_orders(fileContent);
     console.log("completed");
    }
  });

  import_file_orders = function(file) {
   console.log("enter function import_file_orders")
   var lines = file.split(/\r\n|\n/);
   var l = lines.length;
   for (var i=0; i < l; i++) {
    var line = lines[i];
    if (line.trim() === '') continue; // skip blank lines such as a trailing newline
    var line_parts = line.split(',');
    var id = line_parts[0];
    var value = line_parts[1];
    var result = Orders.insert({id:id, value:value});
    console.log(Orders.findOne(result));
   }
  };

}
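
Note that this loader does no header handling: every line is split on commas and the first two fields become id and value. A hypothetical input file for a quick test:

1,apple
2,banana
3,cherry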