Wednesday, July 13, 2016

Join 3 tables - Oracle native outer join vs. ANSI outer join

select t1.id t1_id, t2.id t2_id, t3.id t3_id
from t1,t2,t3
where t1.id = t2.id(+)
and t2.id = t3.id(+)
--order by 1
;

     T1_ID      T2_ID      T3_ID
---------- ---------- ----------
         1          1          1
         3                      
         2          2           


select t1.id t1_id, t2.id t2_id, t3.id t3_id
from t1
left join t2 on t1.id = t2.id
left join t3 on t3.id = t2.id
--order by 1
;

     T1_ID      T2_ID      T3_ID
---------- ---------- ----------
         1          1          1
         3                      
         2          2           

Observation: They return the same result.


FYI, here is the test data setup.

drop table t1 purge;
drop table t2 purge;
drop table t3 purge;
create table t1 (id number);
create table t2 (id number);
create table t3 (id number);
insert into t1(id) select rownum from dual connect by level <= 3;
insert into t2(id) select rownum from dual connect by level <= 2;
insert into t3(id) select rownum from dual connect by level <= 1;
commit;

Thursday, March 03, 2016

Refactoring PL/SQL Table Function demo

"Refactoring is the process of changing a software system in such a way that it does not alter the external behavior of the code yet improves its internal structure." -- Martin Fowler in Refactoring: Improving The Design Of Existing Code

Here is an example of how I usually build a PL/SQL table function.

Before refactoring, we have to write the SQL in two places, which is repetition.

create or replace PACKAGE demo_pkg
AS
  CURSOR gc_data IS select 1 col1, 2 col2 from dual;
  type data_tab is table of gc_data%RowType;

  FUNCTION f1(p_a number)
    RETURN data_tab PIPELINED;
end;
/


CREATE OR REPLACE PACKAGE BODY demo_pkg
AS
  FUNCTION f1(p_a number)
    RETURN data_tab PIPELINED
  is
  BEGIN
    FOR lr in (select 1 col1, 2 col2 from dual where p_a > 5)
    LOOP
      pipe ROW(lr);
    END LOOP;
    RETURN;
  EXCEPTION
  WHEN OTHERS THEN
    /* Don't forget to clean up here, too! */
    /* Standard error logging mechanism */
    --logger.log_error('Unhandled Exception', l_scope);
    raise;
  END;
end;
/


After refactoring, we only need to write the SQL (cursor) once, removing the unnecessary repetition.

create or replace PACKAGE demo_pkg
AS
  CURSOR gc_data (p_a number) IS
    select 1 col1, 2 col2, 3 col3 from dual where p_a > 5;
  type data_tab is table of gc_data%RowType;

  FUNCTION f1(p_a number)
    RETURN data_tab PIPELINED;
end;
/


CREATE OR REPLACE PACKAGE BODY demo_pkg
AS
  FUNCTION f1(p_a number)
    RETURN data_tab PIPELINED
  is
    lr gc_data%rowtype;
  BEGIN
    OPEN gc_data(p_a);
    LOOP
      FETCH gc_data INTO lr;
      EXIT WHEN gc_data%NOTFOUND; -- exit before PIPE ROW, otherwise the last row is piped twice
      PIPE ROW(lr);
    END LOOP;
    close gc_data;
    RETURN;
  EXCEPTION
  WHEN OTHERS THEN
    /* Don't forget to clean up here, too! */
    /* Standard error logging mechanism */
    --logger.log_error('Unhandled Exception', l_scope);
    raise;
  END;
end;
/


Observed:

This makes the output side DRY, but not the input side: if we add, change, or remove a parameter, we still have to update two places (the cursor's parameter list and the function's parameter list).

Reference:

1) http://www.refactoring.com/
2) call example: select col1, col2 from table(demo_pkg.f1(18));

Thursday, February 25, 2016

flex CSV loader

Purpose: Load a CSV file of any structure from Meteor into MongoDB, using the harrison:papa-parse package.

$> meteor create csv
$> cd csv

meteor add harrison:papa-parse
meteor add themeteorchef:bert
meteor add reactive-var
meteor add fortawesome:fontawesome

Then copy the 2 files below into ./csv/

  • csv.html

<head>
  <title>csv</title>
</head>

<body>
  <h1>Welcome to CSV loader!</h1>

  {{> upload}}
</body>

<template name="upload">
  <h4 class="page-header">Upload a CSV with no fixed structure.</h4>

  {{#unless uploading}}
    <input type="file" name="upload_CSV">
  {{else}}
    <p><i class="fa fa-spin fa-refresh"></i> Uploading files...</p>
  {{/unless}}
</template>


  • csv.js

Order = new Mongo.Collection('order');
if (Meteor.isClient) {
  Template.upload.onCreated( () => {
    Template.instance().uploading = new ReactiveVar( false );
  });

  Template.upload.helpers({
    uploading() {
      return Template.instance().uploading.get();
    }
  });

  Template.upload.events({
    'change [name="upload_CSV"]' (event, template) {
      template.uploading.set(true); // show the "Uploading files..." state while parsing
      Papa.parse(event.target.files[0], {
        header: true,
        complete(results, file) {
          Meteor.call('parseUpload', results.data, (error, response) => {
            if (error) {
              Bert.alert(error.reason, 'warning');
            } else {
              template.uploading.set(false);
              Bert.alert('Upload complete!', 'success', 'growl-top-right' );
            }
          });
        }
      });
    }
  });

}
if (Meteor.isServer) {
  Meteor.startup(function () {
    // code to run on server at startup
  });

  Meteor.methods({
    parseUpload(data){
      //check(data, Array);

      for ( let i = 0; i < data.length; i++) {
        let item = data[i],
            exists = Order.findOne({order_no: item.order_no}); // match on the field we actually store
        if ( (item.order_no === undefined) || !exists ) {
          Order.insert(item);
        } else {
          console.warn( 'Rejected. This item already exists.');
          console.log(exists.Age + "," + item.order_no);
        }
      }
    }
  });
}

Wednesday, February 24, 2016

Load CSV file in Meteor

Purpose: Demo how to load a CSV file into MongoDB.

$> meteor create load_csv

Then copy the 2 files below into ./load_csv/

load_csv.html

<head>
  <title>load_csv</title>
</head>

<body>
 <h1>Welcome to CSV loader!</h1>
 {{> read_file_orders}}
 {{> order}}
</body>

<template name="read_file_orders">
 <form class="well form-inline">
  <input class="input-file" id="fileInput2" type="file" name="files[]">
  <button class="btn btn-primary" id="read_orders">Import</button>
  <button>clear file name</button>
 </form>
</template>

<template name="order">
  <ul>
  {{#each order}}
    <li>{{id}}: {{value}}</li>
  {{/each}}
  </ul>
</template>


load_csv.js

Orders = new Mongo.Collection("orders");
if (Meteor.isClient) {
// counter starts at 0

  Template.read_file_orders.events({
   "click #read_orders" : function(e) {
    var f = document.getElementById('fileInput2').files[0];
    console.log("read file");
    readFile(f, function(content) {
      Meteor.call('upload',content);
    });
   }
  });

  Template.order.helpers({
    'order': function(){
     return Orders.find();
    }
  });

  readFile = function(f,onLoadCallback) {
    //When the file is loaded the callback is called with the contents as a string
    var reader = new FileReader();
    reader.onload = function (e){
      var contents=e.target.result
      onLoadCallback(contents);
    }
    reader.readAsText(f);
  };
}

if (Meteor.isServer) {
  Meteor.startup(function () {
  // code to run on server at startup

  });
  Meteor.methods({
    upload : function(fileContent) {
     console.log("start insert");
     import_file_orders(fileContent);
     console.log("completed");
    }
  });

  import_file_orders = function(file) {
   console.log("enter function import_file_orders")
   var lines = file.split(/\r\n|\n/);
   var l = lines.length;
   for (var i=0; i < l; i++) {
    var line = lines[i];
    if (line === '') continue; // skip blank lines, e.g. a trailing newline
    var line_parts = line.split(',');
    var id = line_parts[0];
    var value = line_parts[1];
    var result = Orders.insert({id:id, value:value});
    console.log(Orders.findOne(result));
   }
  };

}

Tuesday, December 22, 2015

Golang Oracle Performance Extensions

Purpose: Demo Go Oracle database driver OCI Performance Extensions.

We will run a benchmark and show the improvement before and after enabling row prefetching.
We also use a SQL session trace file to prove that update batching works, that is, that many rows are inserted in a single round-trip.

Info:

Update Batching

You can reduce the number of round-trips to the database, thereby improving application performance, by grouping multiple UPDATE, DELETE, or INSERT statements into a single batch and having the whole batch sent to the database and processed in one trip. This is referred to as 'update batching'.
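
For example, here is a minimal sketch of update batching with the rana/ora driver, using the same slice-bind pattern as the full program below (it assumes ses is an already-open *ora.Ses, that the emp_go table exists, and that the checkErr helper from the code below is available):

 // Sketch: bulk insert via slice binds (assumes ses is an open *ora.Ses and emp_go exists).
 stmt, err := ses.Prep("INSERT INTO emp_go (empno, ename) VALUES (:N1, :C1)")
 checkErr(err)
 defer stmt.Close()

 ids := []int64{1, 2, 3, 4}
 names := []ora.String{{Value: "Mr. 1"}, {Value: "Mr. 2"}, {Value: "Mr. 3"}, {Value: "Mr. 4"}}

 // One Exe call binds the whole slices, so all rows go to the database in a single round-trip.
 rowsAffected, err := stmt.Exe(ids, names)
 checkErr(err)
 fmt.Println(rowsAffected, "rows inserted in one batch")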

Prefetching rows

This reduces round-trips to the database by fetching multiple rows of data each time data is fetched. The extra data is stored in client-side buffers for later access by the client. The number of rows to prefetch can be set as desired.
...
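
Similarly, a minimal sketch of row prefetching, using the same SetPrefetchRowCount call as the full program below (again assuming ses is an open *ora.Ses and checkErr from the code below):

 // Sketch: set the prefetch size for a SELECT (assumes ses is an open *ora.Ses).
 stmtQry, err := ses.Prep("SELECT empno, ename FROM emp_go")
 checkErr(err)
 defer stmtQry.Close()

 // Fetch up to 200 rows per database round-trip into client-side buffers.
 err = stmtQry.Cfg().SetPrefetchRowCount(uint32(200))
 checkErr(err)

 rset, err := stmtQry.Qry()
 checkErr(err)
 for rset.Next() {
  fmt.Println(rset.Row[0], rset.Row[1])
 }
 checkErr(rset.Err)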

Code:

// Copyright 2015, Author: Charlie Database Craftsman. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

/*
Package main demos 2 Oracle OCI Performance Extensions: the array interface (update batching) and row prefetching.

- Update Batching
It binds slice-typed data, passing an array to the database so one SQL statement does the bulk DML in a single context switch.
- Prefetching rows
It calls StmtCfg.SetPrefetchRowCount to set the prefetch size for a SELECT query.

Notes:
stmtQry.Cfg().SetPrefetchRowCount(uint32(200)) works (statement level),
ses.Cfg().StmtCfg.SetPrefetchRowCount(2000) works (session level).

Setting it on EnvCfg or SrvCfg does not work, due to a code issue: NewStmtCfg() overwrites the Env/Srv setting at the session level,
so Env.StmtCfg.SetPrefetchRowCount has no effect.


| Modification History:
|  Date        Who          What
| 11-Jan-2016: Charlie(Yi): Abstract checkErr function,
| 18-Dec-2015: Charlie(Yi): create the package,

*/
package main


import (
 "fmt"
 "gopkg.in/rana/ora.v3"
)


func main() {
 // example usage of the ora package driver
 // connect to a server and open a session
 env, err := ora.OpenEnv(nil)
 defer env.Close()
 checkErr(err)

 srvCfg := ora.NewSrvCfg()
 srvCfg.Dblink = "//localhost/orcl"

 srv, err := env.OpenSrv(srvCfg)
 defer srv.Close()
 checkErr(err)

 sesCfg := ora.NewSesCfg()
 sesCfg.Username = "scott"
 sesCfg.Password = "tiger"
 fmt.Println("Session PrefetchRowCount :", sesCfg.StmtCfg.PrefetchRowCount())
 sesCfg.StmtCfg.SetPrefetchRowCount(uint32(2000))
 ses, err := srv.OpenSes(sesCfg)
 fmt.Println("connected")
 defer ses.Close()
 checkErr(err)
 fmt.Println("Session PrefetchRowCount :", ses.Cfg().StmtCfg.PrefetchRowCount())
 err = ses.Cfg().StmtCfg.SetPrefetchRowCount(uint32(1000))
 checkErr(err)
 fmt.Println("Session PrefetchRowCount :", ses.Cfg().StmtCfg.PrefetchRowCount())

 stmtTbl, err := ses.Prep(
  `declare
  l_sql varchar2(32767);
  l_cnt pls_integer;
begin
  l_sql := 'drop TABLE emp_go purge';
  select count(*) into l_cnt from user_tables where table_name='EMP_GO';
  if l_cnt > 0 then
    execute immediate l_sql;
  end if;
end;`)
 defer stmtTbl.Close()
 checkErr(err)
 rowsAffected, err := stmtTbl.Exe()
 checkErr(err)
 fmt.Println(rowsAffected, " rows Affected. drop table emp_go if exists.")

 rowsAffected, err = ses.PrepAndExe("CREATE TABLE emp_go (empno number(5,0), ename VARCHAR2(50))")
 checkErr(err)
 fmt.Println("table emp_go created")

 rowsAffected, err = ses.PrepAndExe("ALTER SESSION SET TRACEFILE_IDENTIFIER='go1'")
 checkErr(err)
 //SELECT VALUE FROM V$DIAG_INFO WHERE NAME = 'Default Trace File';
 rowsAffected, err = ses.PrepAndExe("begin dbms_monitor.session_trace_enable( WAITS=>TRUE, binds=>true); end;")
 checkErr(err)

 tx1, err := ses.StartTx()
 stmt, err := ses.Prep("INSERT INTO emp_go (empno, ename) VALUES (:N1, :C1)")
 defer stmt.Close()
 checkErr(err)

 fmt.Println("Demo bulk/batch insert. Bulk DML with slice (array in Go)")
 l_no := make([]int64, 4)
 l_name := make([]ora.String, 4)

 for n := range l_no {
  l_no[n] = int64(n)
  l_name[n] = ora.String{Value: fmt.Sprintf("Mr. %v", n)}
  fmt.Println(n)
 }
 rowsAffected, err = stmt.Exe(l_no, l_name)
 checkErr(err)
 fmt.Println(rowsAffected, " rows add.")
 tx1.Commit()
 fmt.Println("commit")

 fmt.Println("Demo fetch records")
 stmtQry, err := ses.Prep("SELECT /* set PrefetchRowCount = 0 */ empno, ename FROM emp_go")
 defer stmtQry.Close()
 checkErr(err)
 err = stmtQry.Cfg().SetPrefetchRowCount(uint32(0))
 checkErr(err)

 fmt.Println("stmtQry.Cfg().PrefetchRowCount default:", stmtQry.Cfg().PrefetchRowCount())
 rset, err := stmtQry.Qry()
 checkErr(err)

 for rset.Next() {
  fmt.Println(rset.Row[0], rset.Row[1])
 }
 checkErr(rset.Err)

 stmtQry, err = ses.Prep("SELECT /* set PrefetchRowCount = 200 */ empno, ename FROM emp_go")
 defer stmtQry.Close()
 checkErr(err)
 err = stmtQry.Cfg().SetPrefetchRowCount(uint32(200))
 checkErr(err)
 fmt.Println("stmtQry.Cfg().SetPrefetchRowCount(200)", stmtQry.Cfg().PrefetchRowCount())

 rset, err = stmtQry.Qry()
 checkErr(err)

 for rset.Next() {
  fmt.Println(rset.Row[0], rset.Row[1])
 }
 checkErr(rset.Err)

 rowsAffected, err = ses.PrepAndExe("begin dbms_monitor.session_trace_disable(); end;")
 checkErr(err)

 rset, err = ses.PrepAndQry("SELECT VALUE FROM V$DIAG_INFO WHERE NAME = 'Default Trace File'")
 checkErr(err)
 for rset.Next() {
  fmt.Println(rset.Row[0])
 }

}


func checkErr(err error) {
 if err != nil {
  panic(err)
 }
}


Output:

Session PrefetchRowCount : 0
connected
Session PrefetchRowCount : 2000
Session PrefetchRowCount : 1000
0  rows Affected. drop table emp_go if exists.
table emp_go created
Demo bulk/batch insert. Bulk DML with slice (array in Go)
0
1
2
3
4  rows add.
commit
Demo fetch records
stmtQry.Cfg().PrefetchRowCount default: 0
0 Mr. 0
1 Mr. 1
2 Mr. 2
3 Mr. 3
stmtQry.Cfg().SetPrefetchRowCount(200) 200
0 Mr. 0
1 Mr. 1
2 Mr. 2
3 Mr. 3
/home/oracle/app/oracle/diag/rdbms/cdb1/cdb1/trace/cdb1_ora_17092_go1.trc

.
SQL session trace:

tkprof /home/oracle/app/oracle/diag/rdbms/cdb1/cdb1/trace/cdb1_ora_17092_go1.trc rpt.txt
.
TKProf report:
.
Before,  PrefetchRowCount = 0.

SELECT /* set PrefetchRowCount = 0 */ empno, ename
FROM emp_go


call     count       cpu    elapsed       disk      query    current        rows
------- ------  -------- ---------- ---------- ---------- ----------  ----------
Parse        1      0.00       0.00          0          1          0           0
Execute      1      0.00       0.00          0          0          0           0
Fetch        3      0.00       0.00          0          9          0           4
------- ------  -------- ---------- ---------- ---------- ----------  ----------
total        5      0.00       0.00          0         10          0           4

.
After SetPrefetchRowCount = 200: the same 4 rows now come back in a single fetch call (Fetch count drops from 3 to 1), saving round-trips.
.
SELECT /* set PrefetchRowCount = 200 */ empno, ename
FROM emp_go


call     count       cpu    elapsed       disk      query    current        rows
------- ------  -------- ---------- ---------- ---------- ----------  ----------
Parse        1      0.00       0.00          0          1          0           0
Execute      1      0.00       0.00          0          0          0           0
Fetch        1      0.00       0.00          0          7          0           4
------- ------  -------- ---------- ---------- ---------- ----------  ----------
total        3      0.00       0.00          0          8          0           4

.
Bulk INSERT: one Execute call inserts all 4 rows in a single round-trip.
.
INSERT INTO emp_go (empno, ename)
VALUES (:N1, :C1)


call     count       cpu    elapsed       disk      query    current        rows
------- ------  -------- ---------- ---------- ---------- ----------  ----------
Parse        1      0.00       0.00          0          0          0           0
Execute      1      0.00       0.00          0          4         43           4
Fetch        0      0.00       0.00          0          0          0           0
------- ------  -------- ---------- ---------- ---------- ----------  ----------
total        2      0.00       0.00          0          4         43           4


Reference:
.
Oracle driver basic, http://mujiang.blogspot.com/2015/12/golang-connect-to-oracle-database.html
driver readme, https://github.com/rana/ora/blob/master/README.md
Performance, http://docs.oracle.com/database/121/JJDBC/oraperf.htm

Friday, December 18, 2015

Golang connect to Oracle database

Purpose: Set up the Go Oracle driver, with complete code demonstrating some basic DDL, DML, and bulk batch processing.

Minimum requirements are
- Go 1.3 with CGO enabled,
- a GCC C compiler,
- and Oracle 11g (11.2.0.4.0) or Oracle Instant Client (11.2.0.4.0).

This demo was tested on Go 1.5.2 and Oracle VirtualBox, using the imported OTN_Developer_Day_VM.ova appliance image.

** environment settings **

------- Linux ----------
export GOROOT=/usr/local/go
#export GOPATH=/home/oracle/go
export GOPATH=/media/sf_GTD/Project/Go
export PATH=$PATH:/usr/local/go/bin:$GOPATH/bin
mkdir -p $GOPATH/src/github.com/user/hello
cd $GOPATH/src/github.com/user/hello
echo $PATH
echo $GOPATH
# Oracle client driver, environment parameters
# Set the CGO_CFLAGS and CGO_LDFLAGS environment variables to locate the OCI headers and library,
export CGO_CFLAGS=-I$ORACLE_HOME/rdbms/public
export CGO_LDFLAGS="-L$ORACLE_HOME/lib -lclntsh"

-----------
-- install Oracle driver --
go get github.com/rana/ora

-- manual install driver from source zip files --
Download the source zip file and extract it into $GOPATH/src/gopkg.in/rana/ora.v3/ , or rename ora-master to ora.v3.

download address: https://github.com/rana/ora/tree/v3

The file structure will look like:
src\gopkg.in\rana\ora.v3\
                         ora.go
                         env.go
                         ...
                         \examples\...


To import this package, add the following line to your code:
import "gopkg.in/rana/ora.v3"

Code:

// Copyright 2015, Author: Charlie Database Craftsman. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

/*
Package main demos the OCI array interface.
It connects to a database, creates a table, inserts data, and queries the data.

E:\GTD\Project\Go\src\github.com\user\hello\oracle_demo.go
*/
package main


import (
 "fmt"
 "gopkg.in/rana/ora.v3"
)


func main() {
 // example usage of the ora package driver
 // connect to a server and open a session
 env, err := ora.OpenEnv(nil)
 defer env.Close()
 if err != nil {
  panic(err)
 }
 srvCfg := ora.NewSrvCfg()
 srvCfg.Dblink = "//localhost/orcl"
 srv, err := env.OpenSrv(srvCfg)
 defer srv.Close()
 if err != nil {
  panic(err)
 }
 sesCfg := ora.NewSesCfg()
 sesCfg.Username = "scott"
 sesCfg.Password = "tiger"
 ses, err := srv.OpenSes(sesCfg)
 fmt.Println("connected")
 defer ses.Close()
 if err != nil {
  panic(err)
 }

 //StmtCfg.PrefetchRowCount = 1000
 stmtTbl, err := ses.Prep(
  `declare
  l_sql varchar2(32767);
  l_cnt pls_integer;
begin
  l_sql := 'drop TABLE emp_go purge';
  select count(*) into l_cnt from user_tables where table_name='EMP_GO';
  if l_cnt > 0 then
    execute immediate l_sql;
  end if;
end;`)
 defer stmtTbl.Close()
 if err != nil {
  panic(err)
 }
 rowsAffected, err := stmtTbl.Exe()
 if err != nil {
  panic(err)
 }
 fmt.Println(rowsAffected, " rows Affected. drop table emp_go if exists.")

 rowsAffected, err = ses.PrepAndExe("CREATE TABLE emp_go (empno number(5,0), ename VARCHAR2(50))")
 if err != nil {
  panic(err)
 }
 fmt.Println("table emp_go created")

 tx1, err := ses.StartTx()
 rowsAffected, err = ses.PrepAndExe("delete emp_go")
 tx1.Commit()

 tx2, err := ses.StartTx() // start a new transaction for the inserts (tx1 was already committed)
 stmt, err := ses.Prep("INSERT INTO emp_go (empno, ename) VALUES (:N1, :C1)")
 defer stmt.Close()
 rowsAffected, err = stmt.Exe(uint64(1001), "Charlie")
 rowsAffected, err = stmt.Exe(uint64(1002), "Vicky")
 if err != nil {
  panic(err)
 }
 fmt.Println(rowsAffected, "add")

 tx2.Commit()
 fmt.Println("commit")

 fmt.Println("Demo fetch records")
 stmtQry, err := ses.Prep("SELECT empno, ename FROM emp_go")
 defer stmtQry.Close()
 if err != nil {
  panic(err)
 }
 rset, err := stmtQry.Qry()
 if err != nil {
  panic(err)
 }
 for rset.Next() {
  fmt.Println(rset.Row[0], rset.Row[1])
 }
 if rset.Err != nil {
  panic(rset.Err)
 }
}


Output:

$> go run oracle_demo.go

connected
0  rows Affected. drop table emp_go if exists.
table emp_go created
1 add
commit
Demo fetch records
1001 Charlie
1002 Vicky

.

Reference:
https://github.com/rana/ora/blob/master/README.md
http://gopkg.in/rana/ora.v3

Tuesday, December 15, 2015

Golang range and close channel

Code:

package main

import "golang.org/x/tour/tree"
import "fmt"

// Walk walks the tree t sending all values
// from the tree to the channel ch.

func Walk(t *tree.Tree, ch chan int) {
 if t == nil {
  fmt.Println("Null")
  return
 }
 if t.Left != nil {
  Walk(t.Left, ch)
 }
 ch <- t.Value
 fmt.Println("send ", t.Value)
 if t.Right != nil {
  Walk(t.Right, ch)
 }
}



func main() {
 ta := tree.New(1)
 ca := make(chan int)
 go func() {
  Walk(ta, ca)
  close(ca)
 }()


 for v := range ca {
  fmt.Println("get ", v)
 }
}


Comments:

Note where the channel is closed: inside the goroutine in main, right after Walk returns. Closing ca is what lets the for v := range ca loop in main terminate; without the close, the range would block forever.
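
To isolate that point, here is a minimal standalone sketch (not using the tour's tree package) showing that a range over a channel only finishes after the channel is closed:

package main

import "fmt"

func main() {
 ch := make(chan int)

 go func() {
  for i := 0; i < 3; i++ {
   ch <- i // send a few values
  }
  close(ch) // without this close, the range below would block forever
 }()

 // range keeps receiving until ch is closed and drained
 for v := range ch {
  fmt.Println("get ", v)
 }
}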