Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
No results found
Show changes
Showing
with 1487 additions and 0 deletions
package githubsqlfinder;
import java.util.ArrayList;
import java.util.List;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Map;
import java.util.HashMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.tree.*;
import com.google.cloud.bigquery.FieldValueList;
import com.google.cloud.bigquery.TableResult;
import antlr.*;
import antlr.PhpParser.ArithmeticExpressionContext;
import antlr.PhpParser.ChainExpressionContext;
import antlr.PhpParser.StringContext;
public class App {
    /** SQL string candidates collected from the most recently parsed PHP file. */
    static List<String> stringStatements = new ArrayList<>();
    /** Currently unused; kept so any existing callers keep compiling. */
    static Map<String, List<String>> stringStatementsWithParts = new HashMap<>();
    /** Counter for :pN placeholders while rebuilding a concatenated query. */
    static int paramIndex = 0;
    /** Accumulator for the query currently being rebuilt from a parse tree. */
    static String query = "";
    // Timing (milliseconds) and per-dialect statistics.
    static long githubTimeMs = 0;
    static long antlrTimeMs = 0;
    static long parsingTreeTimeMs = 0;
    static int numberOfAddedQueries = 0;
    static int numberOfAddedQueriesMySql = 0;
    static int numberOfAddedQueriesPlSql = 0;
    static int numberOfAddedQueriesPostgreSQL = 0;
    static int numberOfAddedQueriesTSql = 0;
    static long timeMs = 0;

    /** Compiled once: matches PHP variables ($var, '$var', "$var") inside SQL text. */
    private static final Pattern PHP_VARIABLE_PATTERN =
            Pattern.compile("(\\$[^\\s]+\\s)|('\\$[^']+')|(\"\\$[^\"]+\")");

    /**
     * Entry point.
     *
     * Positional arguments (all optional):
     *   0 - number of GitHub files to search
     *   1 - batch size
     *   2 - mode (1 = example files, 2 = GitHub, 3 = clear DB)
     *   3 - offset into the GitHub result set
     *   4 - sample flag (1 = sample data, 2 = real data)
     */
    public static void main(String[] args) throws Exception {
        long sTime = System.nanoTime();
        int numberOfGithubFilesToSearch = 100;
        int batchSize = 100;
        int mode = 2;
        int offset = 0;
        int sample = 1;
        if (args.length > 0) {
            // number of files from github that should be searched
            try {
                numberOfGithubFilesToSearch = Integer.parseInt(args[0]);
            } catch (NumberFormatException e) {
                System.err.println("The first parameter (number of files to process) was incorrect");
                return;
            }
        }
        if (args.length > 1) {
            // size of one batch to process
            try {
                batchSize = Integer.parseInt(args[1]);
            } catch (NumberFormatException e) {
                System.err.println("The second parameter (the size of the batch) was incorrect");
                return;
            }
        }
        if (args.length > 2) {
            // processing mode
            try {
                mode = Integer.parseInt(args[2]);
            } catch (NumberFormatException e) {
                System.err.println("The third parameter (mode) was incorrect");
                return;
            }
        }
        if (args.length > 3) {
            // offset into the GitHub result set
            try {
                offset = Integer.parseInt(args[3]);
            } catch (NumberFormatException e) {
                // Fixed ordinal: args[3] is the fourth parameter, not the third.
                System.err.println("The fourth parameter (offset) was incorrect");
                return;
            }
        }
        if (args.length > 4) {
            // sample-data flag
            try {
                sample = Integer.parseInt(args[4]);
            } catch (NumberFormatException e) {
                // Fixed ordinal: args[4] is the fifth parameter, not the fourth.
                System.err.println("The fifth parameter (sample data) was incorrect");
                return;
            }
        }
        System.out.println("Number of files to process: " + numberOfGithubFilesToSearch);
        System.out.println("Batch size: " + batchSize);
        System.out.println("Mode (1 - Example files, 2 - Github, 3 - Clear DB): " + mode);
        System.out.println("Data (1 - Sample data, 2 - Real data): " + sample);
        System.out.println("offset: " + offset);
        JDBC db = new JDBC();
        boolean repeatCycle = true;
        String input = "";
        if (mode == 1) {
            // Parse the bundled example PHP files from the classpath.
            int maxFile = 3;
            for (int i = 0; i < maxFile; i++) {
                String filePath = "example" + (i + 1) + ".txt";
                Path path = Paths.get(App.class.getClassLoader().getResource(filePath).toURI());
                input = Files.readString(path);
                runPhpAntlr(input, "example", filePath);
                for (String query : stringStatements) {
                    if (isValidQuery(query)) {
                        runAntlr(query.replaceAll("\\\\", ""), "example", filePath);
                    }
                }
            }
        } else if (mode == 2) {
            // Fetch PHP files from the public GitHub dataset in batches.
            int step = 0;
            int index = 0;
            int originalBatchSize = batchSize;
            System.out.println("Processed files / All files");
            while (repeatCycle) {
                GithubFinder githubFinder = new GithubFinder();
                if ((step + 1) * batchSize >= numberOfGithubFilesToSearch) {
                    // Shrink the final batch so no more than the requested count is fetched.
                    batchSize = numberOfGithubFilesToSearch - (step * batchSize);
                }
                long startTime = System.nanoTime();
                TableResult result = githubFinder.getFilesFromGithub(batchSize, step, originalBatchSize, offset, sample);
                long endTime = System.nanoTime();
                githubTimeMs += (endTime - startTime) / 1_000_000;
                for (FieldValueList row : result.iterateAll()) {
                    input = row.get("content").getStringValue();
                    String repoName = row.get("repo_name").getStringValue();
                    String filePath = row.get("path").getStringValue();
                    runPhpAntlr(input, repoName, filePath);
                    for (String query : stringStatements) {
                        if (isValidQuery(query)) {
                            runAntlr(query.replaceAll("\\\\", ""), repoName, filePath);
                        }
                    }
                    index++;
                    System.out.println(index + "/" + numberOfGithubFilesToSearch);
                }
                step++;
                if ((step * originalBatchSize) >= numberOfGithubFilesToSearch) {
                    repeatCycle = false;
                }
            }
        } else if (mode == 3) {
            db.deleteDb();
        }
        db.showPage(0, true);
        long eTime = System.nanoTime();
        timeMs += (eTime - sTime) / 1_000_000;
        if (mode == 2) {
            System.out.println("Github requests time in ms: " + githubTimeMs + "(" + String.format("%.2f", ((double) githubTimeMs / timeMs) * 100) + "%)");
            System.out.println("ANTLR parsing time in ms: " + antlrTimeMs + "(" + String.format("%.2f", ((double) antlrTimeMs / timeMs) * 100) + "%)");
            System.out.println("Parsing tree string finding: " + parsingTreeTimeMs + "(" + String.format("%.2f", ((double) parsingTreeTimeMs / timeMs) * 100) + "%)");
            System.out.println("Whole time: " + timeMs);
            System.out.println("Number of all found queries: " + numberOfAddedQueries + "(from " + numberOfGithubFilesToSearch + " files)");
            System.out.println("Number of TSql queries: " + numberOfAddedQueriesTSql);
            System.out.println("Number of Postgre SQL queries: " + numberOfAddedQueriesPostgreSQL);
            System.out.println("Number of PlSql queries: " + numberOfAddedQueriesPlSql);
            System.out.println("Number of MySql queries: " + numberOfAddedQueriesMySql);
            System.out.println("New offset to use for query: " + (numberOfGithubFilesToSearch + offset));
        }
    }

    /**
     * Parses a PHP source string with the ANTLR PHP grammar and collects SQL
     * string candidates into {@link #stringStatements} (reset on every call).
     */
    public static void runPhpAntlr(String input, String repoName, String filePath) {
        ANTLRInputStream phpInputStream = new ANTLRInputStream(input);
        PhpLexer phpLexer = new PhpLexer(phpInputStream);
        CommonTokenStream phpTokens = new CommonTokenStream(phpLexer);
        PhpParser phpParser = new PhpParser(phpTokens);
        phpLexer.removeErrorListeners();
        phpParser.removeErrorListeners();
        stringStatements = new ArrayList<>();
        phpParser.addErrorListener(new DiagnosticErrorListener());
        long startTime = System.nanoTime();
        ParseTree phpTree = phpParser.phpBlock();
        long endTime = System.nanoTime();
        antlrTimeMs += (endTime - startTime) / 1_000_000;
        startTime = System.nanoTime();
        parseTree(phpTree, 0);
        endTime = System.nanoTime();
        parsingTreeTimeMs += (endTime - startTime) / 1_000_000;
    }

    /**
     * Depth-first walk over the PHP parse tree, collecting string literals and
     * reconstructed concatenations into {@link #stringStatements}.
     */
    private static void parseTree(ParseTree tree, int index) {
        if (tree instanceof StringContext) {
            ParseTree parent = tree.getParent();
            ArithmeticExpressionContext arithmeticExpressionContext = null;
            // Walk up while the ancestor still textually contains this string,
            // remembering the outermost arithmetic (concatenation) node.
            while (parent != null && parent.getText().contains(tree.getText())) {
                if (parent instanceof ArithmeticExpressionContext) {
                    arithmeticExpressionContext = (ArithmeticExpressionContext) parent;
                }
                parent = parent.getParent();
            }
            if (arithmeticExpressionContext != null) {
                // String is part of a concatenation: rebuild it with :pN placeholders.
                query = "";
                paramIndex = 0;
                getParametrizedQuery(arithmeticExpressionContext);
                if (!query.isEmpty() && !stringStatements.contains(query)) {
                    stringStatements.add(query);
                }
            } else {
                // Plain string literal: strip backslashes, quotes, and PHP variables.
                String text = stripQuotes(tree.getText().replaceAll("\\\\", ""));
                stringStatements.add(replacePHPVariables(text));
            }
        }
        // Recurse into children (was duplicated in both branches before).
        for (int i = 0; i < tree.getChildCount(); i++) {
            index++;
            parseTree(tree.getChild(i), index);
        }
    }

    /** Removes one pair of matching surrounding single or double quotes, if present. */
    private static String stripQuotes(String text) {
        if (((text.startsWith("'") && text.endsWith("'")) || (text.startsWith("\"") && text.endsWith("\""))) && text.length() > 1) {
            return text.substring(1, text.length() - 1);
        }
        return text;
    }

    /**
     * Replaces embedded PHP variables ($x, '$x', "$x") with :p0, :p1, ... placeholders.
     *
     * @param sql raw SQL text possibly containing PHP variables
     * @return the SQL text with variables replaced by numbered placeholders
     */
    public static String replacePHPVariables(String sql) {
        Matcher matcher = PHP_VARIABLE_PATTERN.matcher(sql);
        StringBuffer result = new StringBuffer();
        int paramIndex = 0; // local counter, independent of the static field
        while (matcher.find()) {
            String replacement = ":p" + paramIndex++ + " ";
            matcher.appendReplacement(result, replacement);
        }
        matcher.appendTail(result);
        return result.toString();
    }

    /**
     * Rebuilds a concatenated PHP expression into {@link #query}: string literals
     * are appended verbatim (unquoted), variable/method chains become :pN placeholders.
     */
    public static void getParametrizedQuery(ParseTree tree) {
        if (tree instanceof StringContext) {
            query += stripQuotes(tree.getText().replaceAll("\\\\", ""));
        } else if (tree instanceof ChainExpressionContext) {
            // Non-literal operand: substitute a numbered placeholder.
            query += ":p" + paramIndex;
            paramIndex++;
        } else {
            for (int i = 0; i < tree.getChildCount(); i++) {
                getParametrizedQuery(tree.getChild(i));
            }
        }
    }

    /**
     * Cheap pre-filter: accepts strings that look like SELECT ... FROM or
     * CREATE TABLE statements (optionally still wrapped in a quote character).
     * NOTE: '.' does not match newlines here, so multi-line queries are rejected.
     */
    public static boolean isValidQuery(String query) {
        if (query == null) {
            return false;
        }
        String trimmedQuery = query.trim();
        String lowerCaseQuery = trimmedQuery.toLowerCase();
        if (lowerCaseQuery.matches("^(\"select|\'select|select).*from.*")) {
            return true;
        }
        if (lowerCaseQuery.matches("^(\"create table|\'create table|create table).*")) {
            return true;
        }
        return false;
    }

    /**
     * Tries to parse the candidate SQL with each dialect grammar in turn
     * (PL/SQL, MySQL, T-SQL, PostgreSQL) and stores the statements of the first
     * dialect that parses without syntax errors.
     */
    public static void runAntlr(String input, String repoName, String filePath) {
        JDBC db = new JDBC();
        input = input.toUpperCase();
        List<String> plSqlStatements = new ArrayList<>();
        List<String> mySqlStatements = new ArrayList<>();
        List<String> tSqlStatements = new ArrayList<>();
        List<String> postgreSQLStatements = new ArrayList<>();
        ANTLRInputStream plSqlInputStream = new ANTLRInputStream(input);
        // PL/SQL
        PlSqlLexer plSqlLexer = new PlSqlLexer(plSqlInputStream);
        CommonTokenStream plSqlTokens = new CommonTokenStream(plSqlLexer);
        PlSqlParser plSqlParser = new PlSqlParser(plSqlTokens);
        plSqlLexer.removeErrorListeners();
        plSqlParser.removeErrorListeners();
        ParseTree plSqlTree = plSqlParser.sql_script();
        PlSqlStatementListener plSqlStatementListener = new PlSqlStatementListener(plSqlStatements::add);
        ParseTreeWalker.DEFAULT.walk(plSqlStatementListener, plSqlTree);
        if (plSqlParser.getNumberOfSyntaxErrors() != 0) {
            // MySQL
            ANTLRInputStream MySqlInputStream = new ANTLRInputStream(input);
            MySqlLexer mySqlLexer = new MySqlLexer(MySqlInputStream);
            CommonTokenStream mySqlTokens = new CommonTokenStream(mySqlLexer);
            MySqlParser mySqlParser = new MySqlParser(mySqlTokens);
            mySqlLexer.removeErrorListeners();
            mySqlParser.removeErrorListeners();
            ParseTree mySqlTree = mySqlParser.root();
            MySqlStatementListener mySqlStatementListener = new MySqlStatementListener(mySqlStatements::add);
            ParseTreeWalker.DEFAULT.walk(mySqlStatementListener, mySqlTree);
            if (mySqlParser.getNumberOfSyntaxErrors() != 0) {
                // T-SQL
                ANTLRInputStream TSqlInputStream = new ANTLRInputStream(input);
                TSqlLexer tSqlLexer = new TSqlLexer(TSqlInputStream);
                CommonTokenStream tSqlTokens = new CommonTokenStream(tSqlLexer);
                TSqlParser tSqlParser = new TSqlParser(tSqlTokens);
                tSqlLexer.removeErrorListeners();
                tSqlParser.removeErrorListeners();
                ParseTree tSqlTree = tSqlParser.tsql_file();
                TSqlStatementListener tSqlStatementListener = new TSqlStatementListener(tSqlStatements::add);
                ParseTreeWalker.DEFAULT.walk(tSqlStatementListener, tSqlTree);
                if (tSqlParser.getNumberOfSyntaxErrors() != 0) {
                    // PostgreSQL
                    ANTLRInputStream PostgreSQLInputStream = new ANTLRInputStream(input);
                    PostgreSQLLexer postgreSQLLexer = new PostgreSQLLexer(PostgreSQLInputStream);
                    CommonTokenStream postgreSQLTokens = new CommonTokenStream(postgreSQLLexer);
                    PostgreSQLParser postgreSQLParser = new PostgreSQLParser(postgreSQLTokens);
                    postgreSQLLexer.removeErrorListeners();
                    postgreSQLParser.removeErrorListeners();
                    ParseTree postgreSQLTree = postgreSQLParser.root();
                    PostgreSQLStatementListener postgreSQLStatementListener = new PostgreSQLStatementListener(postgreSQLStatements::add);
                    ParseTreeWalker.DEFAULT.walk(postgreSQLStatementListener, postgreSQLTree);
                    if (postgreSQLParser.getNumberOfSyntaxErrors() == 0) {
                        for (int i = 0; i < postgreSQLStatements.size(); i++) {
                            db.addSqlInfo(postgreSQLStatements.get(i), repoName, filePath, "POSTGRE SQL");
                            numberOfAddedQueries++;
                            numberOfAddedQueriesPostgreSQL++;
                        }
                    }
                } else {
                    for (int i = 0; i < tSqlStatements.size(); i++) {
                        db.addSqlInfo(tSqlStatements.get(i), repoName, filePath, "TSQL");
                        numberOfAddedQueries++;
                        numberOfAddedQueriesTSql++;
                    }
                }
            } else {
                for (int i = 0; i < mySqlStatements.size(); i++) {
                    db.addSqlInfo(mySqlStatements.get(i), repoName, filePath, "MYSQL");
                    numberOfAddedQueries++;
                    numberOfAddedQueriesMySql++;
                }
            }
        } else {
            for (int i = 0; i < plSqlStatements.size(); i++) {
                db.addSqlInfo(plSqlStatements.get(i), repoName, filePath, "PLSQL");
                numberOfAddedQueries++;
                numberOfAddedQueriesPlSql++;
            }
        }
    }
}
\ No newline at end of file
package githubsqlfinder;
import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.FieldValueList;
import com.google.cloud.bigquery.Job;
import com.google.cloud.bigquery.JobException;
import com.google.cloud.bigquery.JobId;
import com.google.cloud.bigquery.JobInfo;
import com.google.cloud.bigquery.QueryJobConfiguration;
import com.google.cloud.bigquery.TableResult;
import java.util.UUID;
public class GithubFinder {
    /**
     * Queries the BigQuery public github_repos dataset for PHP files that look
     * like they contain SQL (SELECT ... FROM or CREATE TABLE).
     *
     * @param batchSize number of rows to fetch for this batch
     * @param step zero-based batch index
     * @param originalBatchSize batch size used for computing the row offset
     * @param offset extra offset into the result set
     * @param sample 1 = query the small sample_* tables, otherwise the full tables
     * @return the query result, or null if fetching the results failed
     */
    public TableResult getFilesFromGithub(int batchSize, int step, int originalBatchSize, int offset, int sample) {
        BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
        // Full dataset query; the sample variant below overrides it when sample == 1.
        String phpFilesQuery = "select path, repo_name, content " +
                "from bigquery-public-data.github_repos.files as files " +
                "join bigquery-public-data.github_repos.contents as contents on files.id = contents.id " +
                "where files.path like '%.php' " +
                "and (contents.content like '%select%from%' or contents.content like '%create table%') order by repo_name limit " + batchSize + " offset " + (offset + (step * originalBatchSize)) + ";";
        if (sample == 1) {
            phpFilesQuery = "select path, repo_name, content " +
                "from bigquery-public-data.github_repos.sample_files as sample_files " +
                "join bigquery-public-data.github_repos.sample_contents as sample_contents on sample_files.id = sample_contents.id " +
                "where sample_files.path like '%.php' " +
                "and (sample_contents.content like '%select%from%' or sample_contents.content like '%create table%') order by repo_name limit " + batchSize + " offset " + (offset + (step * originalBatchSize)) + ";";
        }
        QueryJobConfiguration queryConfig =
                QueryJobConfiguration.newBuilder(phpFilesQuery).build();
        JobId jobId = JobId.of(UUID.randomUUID().toString());
        Job queryJob = bigquery.create(JobInfo.newBuilder(queryConfig).setJobId(jobId).build());
        try {
            queryJob = queryJob.waitFor();
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers can observe the interruption.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        }
        if (queryJob == null) {
            throw new RuntimeException("Job no longer exists");
        } else if (queryJob.getStatus().getError() != null) {
            throw new RuntimeException(queryJob.getStatus().getError().toString());
        }
        try {
            return queryJob.getQueryResults();
        } catch (JobException e) {
            e.printStackTrace();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            e.printStackTrace();
        }
        return null;
    }
}
package githubsqlfinder;
import java.sql.*;
import java.io.FileWriter;
import java.io.IOException;
import java.util.List;
public class JDBC {
    /** Lazily opened SQLite connection shared by all operations of this instance. */
    private Connection connection = null;

    /** Opens the SQLite database and makes sure the sql_info table exists. */
    private void connect() {
        try {
            this.connection = DriverManager.getConnection("jdbc:sqlite:sample.db");
            // try-with-resources closes the Statement (the original leaked it).
            try (Statement statement = connection.createStatement()) {
                statement.setQueryTimeout(30);
                statement.executeUpdate("create table if not exists sql_info (id integer primary key, sql string, repo_name string, file_path string, db_dialect string)");
            }
        } catch (SQLException e) {
            e.printStackTrace(System.err);
        }
    }

    /** Drops and recreates the sql_info table. */
    public void deleteDb() {
        if (this.connection == null) {
            this.connect();
        }
        try (Statement statement = this.connection.createStatement()) {
            statement.executeUpdate("drop table if exists sql_info");
            statement.executeUpdate("create table if not exists sql_info (id integer primary key, sql string, repo_name string, file_path string, db_dialect string)");
        } catch (SQLException e) {
            e.printStackTrace(System.err);
            System.out.printf("%d ||| %s", e.getErrorCode(), e.getMessage());
        }
    }

    /**
     * Inserts one found SQL statement with its origin and dialect.
     * Trivial statements ("&lt;EOF&gt;", ";") are skipped.
     */
    public void addSqlInfo(String sql, String repoName, String filePath, String dbDialect) {
        if (sql.equals("<EOF>") || sql.equals(";") || sql.equals("<EOF>;")) {
            return;
        }
        if (this.connection == null) {
            this.connect();
        }
        // PreparedStatement parameters guard against injection from scraped content.
        try (PreparedStatement pstmt = connection.prepareStatement("insert into sql_info(sql, repo_name, file_path, db_dialect) values(?, ?, ?, ?)")) {
            pstmt.setString(1, sql);
            pstmt.setString(2, repoName);
            pstmt.setString(3, filePath);
            pstmt.setString(4, dbDialect);
            pstmt.executeUpdate();
        } catch (SQLException e) {
            e.printStackTrace(System.err);
            System.out.printf("%d ||| %s", e.getErrorCode(), e.getMessage());
        }
    }

    /**
     * Prints stored statements to stdout.
     *
     * @param offset row offset when paging
     * @param all true prints every row, false prints a page of 100 rows
     */
    public void showPage(int offset, boolean all) {
        if (this.connection == null) {
            this.connect();
        }
        String sql;
        if (all) {
            sql = "select * from sql_info";
        } else {
            sql = "select * from sql_info limit 100 offset " + offset;
        }
        try (Statement statement = this.connection.createStatement();
             ResultSet rs = statement.executeQuery(sql)) {
            System.out.println("Id | Repozitory | File | DB dialect | SQL query");
            while (rs.next()) {
                System.out.printf("%d | %s | %s | %s | %s\n", rs.getInt("id"), rs.getString("repo_name"), rs.getString("file_path"), rs.getString("db_dialect"), rs.getString("sql"));
            }
        } catch (SQLException e) {
            e.printStackTrace(System.err);
            System.out.printf("%d ||| %s", e.getErrorCode(), e.getMessage());
        }
    }

    /**
     * Writes rows to fileName as naive CSV (values are not quoted or escaped).
     *
     * @param fileName destination path
     * @param data rows; each array becomes one comma-joined line
     */
    public static void writeCsvFile(String fileName, List<String[]> data) {
        try (FileWriter writer = new FileWriter(fileName)) {
            for (String[] row : data) {
                writeRow(writer, row);
            }
            System.out.println("CSV file created: " + fileName);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /** Appends one comma-joined row followed by a newline. */
    private static void writeRow(FileWriter writer, String[] row) throws IOException {
        writer.append(String.join(",", row));
        writer.append("\n");
    }
}
\ No newline at end of file
package githubsqlfinder;
import java.util.function.Consumer;
import org.antlr.v4.runtime.tree.*;
import antlr.*;
public class MySqlStatementListener extends MySqlParserBaseListener {
    /** Receives each recreated SQL statement string. */
    private final Consumer<String> sqlStatementConsumer;

    public MySqlStatementListener(Consumer<String> sqlStatementConsumer) {
        this.sqlStatementConsumer = sqlStatementConsumer;
    }

    /**
     * On entering the parse-tree root, rebuilds the first child's statement text
     * from its terminal nodes and hands it to the consumer.
     */
    @Override
    public void enterRoot(MySqlParser.RootContext ctx) {
        if (ctx.getChildCount() > 0) {
            StringBuilder stringBuilder = new StringBuilder();
            recreateStatementString(ctx.getChild(0), stringBuilder);
            // Guard against an empty builder (child without terminal nodes) before
            // replacing the trailing space with ';' — mirrors TSqlStatementListener
            // and avoids StringIndexOutOfBoundsException.
            if (stringBuilder.length() > 0) {
                stringBuilder.setCharAt(stringBuilder.length() - 1, ';');
            }
            String recreatedSqlStatement = stringBuilder.toString();
            sqlStatementConsumer.accept(recreatedSqlStatement);
        }
        super.enterRoot(ctx);
    }

    /** Appends every terminal's text below currentNode, each followed by a space. */
    private void recreateStatementString(ParseTree currentNode, StringBuilder stringBuilder) {
        if (currentNode instanceof TerminalNode) {
            stringBuilder.append(currentNode.getText());
            stringBuilder.append(' ');
        }
        for (int i = 0; i < currentNode.getChildCount(); i++) {
            recreateStatementString(currentNode.getChild(i), stringBuilder);
        }
    }
}
package githubsqlfinder;
import java.util.function.Consumer;
import org.antlr.v4.runtime.tree.*;
import antlr.*;
public class PlSqlStatementListener extends PlSqlParserBaseListener {
    /** Receives each recreated SQL statement string. */
    private final Consumer<String> sqlStatementConsumer;

    public PlSqlStatementListener(Consumer<String> sqlStatementConsumer) {
        this.sqlStatementConsumer = sqlStatementConsumer;
    }

    /**
     * On entering the PL/SQL script root, rebuilds the first child's statement
     * text from its terminal nodes and hands it to the consumer.
     */
    @Override
    public void enterSql_script(PlSqlParser.Sql_scriptContext ctx) {
        if (ctx.getChildCount() > 0) {
            StringBuilder stringBuilder = new StringBuilder();
            recreateStatementString(ctx.getChild(0), stringBuilder);
            // Guard against an empty builder (child without terminal nodes) before
            // replacing the trailing space with ';' — mirrors TSqlStatementListener
            // and avoids StringIndexOutOfBoundsException.
            if (stringBuilder.length() > 0) {
                stringBuilder.setCharAt(stringBuilder.length() - 1, ';');
            }
            String recreatedSqlStatement = stringBuilder.toString();
            sqlStatementConsumer.accept(recreatedSqlStatement);
        }
        super.enterSql_script(ctx);
    }

    /** Appends every terminal's text below currentNode, each followed by a space. */
    private void recreateStatementString(ParseTree currentNode, StringBuilder stringBuilder) {
        if (currentNode instanceof TerminalNode) {
            stringBuilder.append(currentNode.getText());
            stringBuilder.append(' ');
        }
        for (int i = 0; i < currentNode.getChildCount(); i++) {
            recreateStatementString(currentNode.getChild(i), stringBuilder);
        }
    }
}
package githubsqlfinder;
import java.util.function.Consumer;
import org.antlr.v4.runtime.tree.*;
import antlr.*;
public class PostgreSQLStatementListener extends PostgreSQLParserBaseListener {
    /** Receives each recreated SQL statement string. */
    private final Consumer<String> sqlStatementConsumer;

    public PostgreSQLStatementListener(Consumer<String> sqlStatementConsumer) {
        this.sqlStatementConsumer = sqlStatementConsumer;
    }

    /**
     * On entering the parse-tree root, rebuilds the first child's statement text
     * from its terminal nodes and hands it to the consumer.
     */
    @Override
    public void enterRoot(PostgreSQLParser.RootContext ctx) {
        if (ctx.getChildCount() > 0) {
            StringBuilder stringBuilder = new StringBuilder();
            recreateStatementString(ctx.getChild(0), stringBuilder);
            // Guard against an empty builder (child without terminal nodes) before
            // replacing the trailing space with ';' — mirrors TSqlStatementListener
            // and avoids StringIndexOutOfBoundsException.
            if (stringBuilder.length() > 0) {
                stringBuilder.setCharAt(stringBuilder.length() - 1, ';');
            }
            String recreatedSqlStatement = stringBuilder.toString();
            sqlStatementConsumer.accept(recreatedSqlStatement);
        }
        super.enterRoot(ctx);
    }

    /** Appends every terminal's text below currentNode, each followed by a space. */
    private void recreateStatementString(ParseTree currentNode, StringBuilder stringBuilder) {
        if (currentNode instanceof TerminalNode) {
            stringBuilder.append(currentNode.getText());
            stringBuilder.append(' ');
        }
        for (int i = 0; i < currentNode.getChildCount(); i++) {
            recreateStatementString(currentNode.getChild(i), stringBuilder);
        }
    }
}
package githubsqlfinder;
import java.util.function.Consumer;
import org.antlr.v4.runtime.tree.*;
import antlr.*;
public class TSqlStatementListener extends TSqlParserBaseListener {
    /** Callback invoked with every reconstructed SQL statement. */
    private final Consumer<String> sqlStatementConsumer;

    public TSqlStatementListener(Consumer<String> sqlStatementConsumer) {
        this.sqlStatementConsumer = sqlStatementConsumer;
    }

    /**
     * When the parser enters the T-SQL file root, flattens the first child's
     * terminal nodes into one statement string and emits it to the consumer.
     */
    @Override
    public void enterTsql_file(TSqlParser.Tsql_fileContext ctx) {
        if (ctx.getChildCount() > 0) {
            StringBuilder sb = new StringBuilder();
            recreateStatementString(ctx.getChild(0), sb);
            int lastIndex = sb.length() - 1;
            if (lastIndex >= 0) {
                // The builder ends with a spacer; turn it into a terminator.
                sb.setCharAt(lastIndex, ';');
            }
            sqlStatementConsumer.accept(sb.toString());
        }
        super.enterTsql_file(ctx);
    }

    /** Appends the text of every terminal below node to sb, space separated. */
    private void recreateStatementString(ParseTree node, StringBuilder sb) {
        if (node instanceof TerminalNode) {
            sb.append(node.getText()).append(' ');
        }
        int childCount = node.getChildCount();
        for (int child = 0; child < childCount; child++) {
            recreateStatementString(node.getChild(child), sb);
        }
    }
}
File added
File added
File added
<?php
/**
 * Minimal Oracle database layer (MediaWiki-style) that rewrites a few
 * Oracle-specific SQL constructs before handing statements to OCI.
 */
class DatabaseOracle {
	var $mInsertId = null;
	var $mLastResult = null;
	var $lastResult = null;
	var $cursor = 0;
	var $mAffectedRows;
	var $ignore_DUP_VAL_ON_INDEX = false;
	var $sequenceData = null;
	var $defaultCharset = 'AL32UTF8';
	var $mFieldInfoCache = array();

	/**
	 * Parse and run $sql after applying Oracle-specific rewrites.
	 *
	 * @param string $sql
	 * @return mixed false on parse failure, true on success, or the result of
	 *         the follow-up plan_table select for EXPLAIN statements
	 */
	protected function doQuery( $sql ) {
		wfDebug( "SQL: [$sql]\n" );
		if ( !mb_check_encoding( $sql ) ) {
			throw new MWException( "SQL encoding is invalid\n$sql" );
		}
		// handle some oracle specifics
		// remove AS column/table/subquery namings
		if( !$this->getFlag( DBO_DDLMODE ) ) {
			$sql = preg_replace( '/ as /i', ' ', $sql );
		}
		// Oracle has issues with UNION clause if the statement includes LOB fields
		// So we do a UNION ALL and then filter the results array with array_unique
		$union_unique = ( preg_match( '/\/\* UNION_UNIQUE \*\/ /', $sql ) != 0 );
		// EXPLAIN syntax in Oracle is EXPLAIN PLAN FOR and it return nothing
		// you have to select data from plan table after explain
		$explain_id = date( 'dmYHis' );
		// $explain_count receives the number of EXPLAIN prefixes replaced (0 or 1).
		$sql = preg_replace( '/^EXPLAIN /', 'EXPLAIN PLAN SET STATEMENT_ID = \'' . $explain_id . '\' FOR', $sql, 1, $explain_count );
		if ( ( $this->mLastResult = $stmt = oci_parse( $this->mConn, $sql ) ) === false ) {
			$e = oci_error( $this->mConn );
			$this->reportQueryError( $e['message'], $e['code'], $sql, __METHOD__ );
			return false;
		}
		if ( $explain_count > 0 ) {
			// For EXPLAIN, the computed plan must be read back from plan_table.
			return $this->doQuery( 'SELECT id, cardinality "ROWS" FROM plan_table WHERE statement_id = \'' . $explain_id . '\'' );
		} else {
			$this->mAffectedRows = oci_num_rows( $stmt );
			return true;
		}
	}
} // end DatabaseOracle class
\ No newline at end of file
"""
PostgreSQL users and databases
==============================
This module provides tools for creating PostgreSQL users and databases.
"""
from __future__ import with_statement
import re
from fabric.api import cd, env, hide, require, run, settings, sudo
def setup_postgis():
    """Install and configure PostgreSQL + PostGIS on the target hosts."""
    require('hosts')
    try:
        install_postgres()
    except Exception:
        # Best effort: PostgreSQL may already be installed. A bare ``except:``
        # would also swallow KeyboardInterrupt/SystemExit, so catch Exception.
        pass
    install_postgis()
    configure_postgis()
def install_postgres():
    """Install the latest PostgreSQL packages from the official pgdg apt repository."""
    require('hosts')
    #add repository to be able to install latest version
    sudo('touch /etc/apt/sources.list.d/pgdg.list')
    sudo('echo "deb http://apt.postgresql.org/pub/repos/apt/ wheezy-pgdg main" > /etc/apt/sources.list.d/pgdg.list')
    # Trust the pgdg signing key, then refresh the package index.
    sudo('wget https://www.postgresql.org/media/keys/ACCC4CF8.asc')
    sudo('apt-key add ACCC4CF8.asc; aptitude update')
    sudo('rm ACCC4CF8.asc')
    #sudo('aptitude install -y postgresql postgresql-server-dev-9.1')
    sudo('aptitude install -y postgresql postgresql-client postgresql-contrib ')
def install_postgis():
    """Install PostGIS via apt (the commented-out block is the old source build)."""
    require('hosts')
    #require('basedir')
    #VERSION = '1.5.7'
    #REP = 'http://download.osgeo.org/postgis/source'
    #pkg = 'postgis-%s' % VERSION
    #tar = '%s.tar.gz' % pkg
    _install_postgis_deps()
    #with cd('%(basedir)s/packages' % env):
    #    if not exists(tar):
    #        _sudo('wget %s/%s' % (REP, tar))
    #    if not exists(pkg):
    #        _sudo('tar xzf %s' % tar)
    #    with cd(pkg):
    #        sudo('ldconfig')
    #        _sudo('./configure && make')
    #        sudo('make install')
    sudo('aptitude install -y postgis')
def configure_postgis(password=None):
    """Create the gisgroup role and enable PostGIS extensions (best effort)."""
    v = postgres_version()
    if v:
        #gis_v = postgis_version(v)
        #contrib = '/usr/share/postgresql/%s/contrib/postgis-%s' % (v[:3], gis_v)
        for cmd in (
            '''psql -c "CREATE ROLE gisgroup;"''',
            '''psql -c "CREATE EXTENSION postgis;"''',
            '''psql -c "CREATE EXTENSION postgis_topology;"''',
            #'''createdb -E UTF8 template_postgis''',
            #'''psql -d template_postgis < %s/postgis.sql''' % contrib,
            #'''psql -d template_postgis < %s/spatial_ref_sys.sql''' % contrib,
            #'''psql -c "ALTER TABLE geometry_columns OWNER TO gisgroup;" template_postgis''',
            #'''psql -c "ALTER TABLE spatial_ref_sys OWNER TO gisgroup;" template_postgis''',
        ):
            try:
                _run(cmd, password)
            except Exception:
                # Best effort: the role/extension may already exist. Catching
                # Exception (not bare ``except:``) keeps Ctrl-C working.
                pass
def create_user(name, password, groups=None, encrypted=True):
    """
    Create a PostgreSQL user.

    Example::

        # Create DB user if it does not exist
        if not user_exists('dbuser'):
            create_user('dbuser', password='somerandomstring')
    """
    # Choose the password clause, then an optional group-membership clause.
    with_password = 'WITH ENCRYPTED PASSWORD' if encrypted else 'WITH PASSWORD'
    groups = ' IN GROUP %s' % ', '.join(groups) if groups else ''
    _run('''psql -c "CREATE USER %(name)s %(with_password)s '%(password)s'%(groups)s;"''' % locals())
def create_database(name, owner, template='template_postgis', encoding='UTF8', locale='en_US.UTF-8'):
    """
    Create a PostgreSQL database.

    Example::

        # Create DB if it does not exist
        if not database_exists('myapp'):
            create_database('myapp', owner='dbuser')
    """
    # NOTE(review): template/encoding/locale are accepted but unused by the
    # active createdb command below — only the commented-out variants used them.
    #_run('''createdb --owner %(owner)s --template %(template)s --encoding=%(encoding)s --lc-ctype=%(locale)s --lc-collate=%(locale)s %(name)s''' % locals())
    #_run('''createdb --owner %(owner)s --encoding=%(encoding)s --lc-ctype=%(locale)s --lc-collate=%(locale)s %(name)s''' % locals())
    _run('''createdb --owner %(owner)s %(name)s''' % locals())
    _run('''psql -d %(name)s -c "CREATE EXTENSION postgis;"''' % locals())
    _run('''psql -d %(name)s -c "CREATE EXTENSION postgis_topology;"''' % locals())
def user_exists(name):
    """
    Check if a PostgreSQL user exists.
    """
    with settings(hide('running', 'stdout', 'stderr', 'warnings'), warn_only=True):
        # ``psql -t -A`` prints only the bare COUNT(*) value, so "1" means found.
        res = _run('''psql -t -A -c "SELECT COUNT(*) FROM pg_user WHERE usename = '%(name)s';"''' % locals())
    return (res == "1")
def change_password(name, password, encrypted=True):
    """Change the password of an existing PostgreSQL user via ALTER USER."""
    with_password = 'WITH ENCRYPTED PASSWORD' if encrypted else 'WITH PASSWORD'
    cmd = '''psql -c "ALTER USER %(name)s %(with_password)s '%(password)s';"''' % locals()
    _run(cmd)
def database_exists(name):
    """
    Check if a PostgreSQL database exists.
    """
    with settings(hide('running', 'stdout', 'stderr', 'warnings'), warn_only=True):
        # Connecting with an empty command succeeds only when the database exists.
        return _run('''psql -d %(name)s -c ""''' % locals()).succeeded
def database_backup(name):
    # TODO: not implemented yet.
    pass
def database_restore():
    # TODO: not implemented yet.
    pass
def unconfigure_postgis(password=None):
    """Run the PostGIS uninstall SQL against the template_postgis database."""
    v = postgres_version()
    if v:
        gis_v = postgis_version(v)
        # Contrib directory is named after the "major.minor" version prefix.
        contrib = '/usr/share/postgresql/%s/contrib/postgis-%s' % (v[:3], gis_v)
        _run('''psql -d template_postgis < %s/uninstall_postgis.sql''' % contrib, password)
def postgis_version(ver=None):
    """Return the PostGIS version found in the postgres contrib directory."""
    if not ver:
        ver = postgres_version()
    else:
        ver = str(ver)
    # Only the "major.minor" prefix (first three characters) names the contrib dir.
    contrib = '/usr/share/postgresql/%s/contrib' % ver[:3]
    out = qrun('find %s -iname "postgis-*"' % contrib)
    return out.replace('%s/postgis-' % contrib, '')
def postgres_version():
    """Return the installed PostgreSQL version as 'major.minor', or '' if unknown."""
    require('hosts')
    out = qrun('psql --version')
    s = out.split('\n')[0]
    # Raw string: '\d' in a plain string literal is an invalid escape sequence
    # (DeprecationWarning, SyntaxWarning in newer Pythons).
    m = re.match(r'.+(\d+)\.(\d+)\.(\d+).*', s)
    if m:
        #return '%s.%s.%s' % (m.group(1), m.group(2), m.group(3))
        return '%s.%s' % (m.group(1), m.group(2))
    return ''
def warn_only():
    """Context manager: hide fabric output and do not abort on command failure."""
    return settings(hide('running', 'stdout', 'stderr', 'warnings'),
                    warn_only=True)
def qrun(cmd):
    """Run cmd quietly: output hidden, failures do not abort the task."""
    with warn_only():
        return run(cmd)
def _install_postgis_deps():
    # Build/runtime dependencies required by PostGIS.
    sudo('aptitude install -y libproj-dev libpq-dev')
def _run(command, password=None):
    """
    Run command as 'postgres' user
    """
    with cd('/var/lib/postgresql'):
        if password:
            # NOTE(review): 'PGPASSWORD=x; sudo ...' sets a shell variable that
            # is neither exported nor kept by sudo's default environment policy —
            # confirm the password actually reaches psql on the target hosts.
            password = 'PGPASSWORD=%s; ' % password
        else:
            password = ''
        return sudo('%ssudo -u postgres %s' % (password, command))
def _sudo(cmd):
    """Run cmd as the configured webapp user (env.webapp_user)."""
    require('webapp_user')
    return sudo(cmd, user='%(webapp_user)s' % env)
\ No newline at end of file
<?php
/**
 * Minimal Oracle database layer (MediaWiki-style); variant of the other
 * DatabaseOracle fixture with an extra statement_id_2 predicate.
 */
class DatabaseOracle {
	var $mInsertId = null;
	var $mLastResult = null;
	var $lastResult = null;
	var $cursor = 0;
	var $mAffectedRows;
	var $ignore_DUP_VAL_ON_INDEX = false;
	var $sequenceData = null;
	var $defaultCharset = 'AL32UTF8';
	var $mFieldInfoCache = array();

	/**
	 * Parse and run $sql after applying Oracle-specific rewrites.
	 *
	 * @param string $sql
	 * @return mixed false on parse failure, true on success, or the result of
	 *         the follow-up plan_table select for EXPLAIN statements
	 */
	protected function doQuery( $sql ) {
		wfDebug( "SQL: [$sql]\n" );
		if ( !mb_check_encoding( $sql ) ) {
			throw new MWException( "SQL encoding is invalid\n$sql" );
		}
		// handle some oracle specifics
		// remove AS column/table/subquery namings
		if( !$this->getFlag( DBO_DDLMODE ) ) {
			$sql = preg_replace( '/ as /i', ' ', $sql );
		}
		// Oracle has issues with UNION clause if the statement includes LOB fields
		// So we do a UNION ALL and then filter the results array with array_unique
		$union_unique = ( preg_match( '/\/\* UNION_UNIQUE \*\/ /', $sql ) != 0 );
		// EXPLAIN syntax in Oracle is EXPLAIN PLAN FOR and it return nothing
		// you have to select data from plan table after explain
		$explain_id = date( 'dmYHis' );
		// $explain_count receives the number of EXPLAIN prefixes replaced (0 or 1).
		$sql = preg_replace( '/^EXPLAIN /', 'EXPLAIN PLAN SET STATEMENT_ID = \'' . $explain_id . '\' FOR', $sql, 1, $explain_count );
		if ( ( $this->mLastResult = $stmt = oci_parse( $this->mConn, $sql ) ) === false ) {
			$e = oci_error( $this->mConn );
			$this->reportQueryError( $e['message'], $e['code'], $sql, __METHOD__ );
			return false;
		}
		if ( $explain_count > 0 ) {
			// NOTE(review): $explain_id_2 is not defined anywhere in this file —
			// confirm where it is supposed to come from before relying on this path.
			return $this->doQuery( 'SELECT id, cardinality "ROWS" FROM plan_table WHERE statement_id = \'' . $explain_id . '\' AND statement_id_2 = ' . $explain_id_2 );
		} else {
			$this->mAffectedRows = oci_num_rows( $stmt );
			return true;
		}
	}
} // end DatabaseOracle class
\ No newline at end of file
<?php include "../include/config.php"; ?>
<?php include "../include/header.php"; ?>
<?php
// Student landing page. Without a StudentID parameter it renders a
// name picker; with one it greets the student and links to reports.
if (empty($_GET['StudentID']))
{
// Accumulate the <option> markup for the picker (was used
// uninitialized before, raising an undefined-variable notice).
$StudentID = "";
$result = oci_parse($con, "SELECT * FROM Students ORDER BY Name ASC");
oci_execute($result);
while($row = oci_fetch_array($result))
{
$StudentID .= "<option value='" . $row['STUDENTID'] . "'>" . $row['NAME'] . "</option>\n";
}
?>
<p>Hello Student. Please select your name from the list below:<p>
<form action="index.php" method="get">
<table cellpadding="0" cellspacing="0">
<tr>
<td><b>Name: </b></td>
<td><select name="StudentID"><?php echo $StudentID; ?></select></td>
</tr>
</table>
<br />
<input type="submit" value="Continue" />
</form>
<div class="home">
<a href="<?php echo $RootDirectory; ?>index.php">Click here to return home</a>
</div>
<?php
}
else
{
// Bind the untrusted StudentID instead of concatenating it into the
// SQL text (the original was vulnerable to SQL injection).
$studentId = $_GET['StudentID'];
$result = oci_parse($con, "SELECT * FROM Students WHERE StudentID = :studentid");
oci_bind_by_name($result, ':studentid', $studentId);
oci_execute($result);
$student = oci_fetch_array($result);
?>
<p>Hello <?php echo $student['NAME']; ?> (Student). You can view any of the following reports:<p>
<div class="list">
<div class="item">
<a href="evaluations/report.php?StudentID=<?php echo $student['STUDENTID']; ?>">Calendar of Evaluations</a>
</div>
<div class="item">
<a href="courses/report.php?StudentID=<?php echo $student['STUDENTID']; ?>">My Courses</a>
</div>
<div class="item">
<a href="grades/report.php?StudentID=<?php echo $student['STUDENTID']; ?>">My Grades</a>
</div>
</div>
<div class="home">
<a href="<?php echo $RootDirectory; ?>index.php">Click here to return home</a>
</div>
<?php
}
?>
<?php include "../include/footer.php"; ?>
\ No newline at end of file
--+ holdcas on;
-- Exercise session variables whose name uses non-ASCII (Khmer) characters.
set names utf8;
set @ហឡកឮ = 2;
-- Expected result: 4.
select @ហឡកឮ + 2;
drop variable @ហឡកឮ;
-- Restore the previous charset for subsequent statements.
set names iso88591;
commit;
--+ holdcas off;
\ No newline at end of file
-- collate8.test
--
-- execsql {
-- SELECT a AS x FROM t1 WHERE +x<'ccc' ORDER BY 1
-- }
-- NOTE(review): the unary '+' in front of x presumably defeats the
-- column's default collation/index usage -- the point of this test.
SELECT a AS x FROM t1 WHERE +x<'ccc' ORDER BY 1
\ No newline at end of file
-- boundary1.test
--
-- db eval {
-- SELECT a FROM t1 WHERE rowid > -9223372036854775808 ORDER BY a
-- }
-- -9223372036854775808 is the smallest 64-bit signed integer, so the
-- predicate matches every rowid strictly greater than the minimum
-- (effectively all rows).
SELECT a FROM t1 WHERE rowid > -9223372036854775808 ORDER BY a
\ No newline at end of file
---------------------------------------------------------------------------------
--Database Backups for all databases For Previous Week
---------------------------------------------------------------------------------
SELECT CONVERT(CHAR(100),SERVERPROPERTY('Servername')) AS Server
, msdb_dbo_backupset_database_name
, msdb_dbo_backupset_backup_start_date
, msdb_dbo_backupset_backup_finish_date
, msdb_dbo_backupset_expiration_date
-- Fixed: 'msdb_backupset.type' referenced a table not present in the
-- FROM clause; use the msdb_dbo_backupset table actually joined below.
, CASE msdb_dbo_backupset.type WHEN 'D' THEN 'Database' WHEN 'L' THEN 'Log'
END AS backup_type
, msdb_dbo_backupset_backup_size
, msdb_dbo_backupmediafamily.logical_device_name
, msdb_dbo_backupmediafamily.physical_device_name
, msdb_dbo_backupset_name AS backupset_name
, msdb_dbo_backupset_description
FROM msdb_dbo_backupmediafamily INNER
JOIN msdb_dbo_backupset
-- Fixed: join key previously used the 'msdb_dbg_' prefix (typo), which
-- does not match the msdb_dbo_backupmediafamily table in the FROM clause.
ON msdb_dbo_backupmediafamily_media_set_id = msdb_dbo_backupset_media_set_id
WHERE (CONVERT(datetime,msdb_dbo_backupset_backup_start_date, 102)>= GETDATE() -7)
ORDER BY msdb_dbo_backupset_database_name, msdb_dbo_backupset_backup_finish_date
-------------------------------------------------------------------------------------------
--Most Recent Database Backup for Each Database - Detailed
-------------------------------------------------------------------------------------------
-- Fixes applied: (1) 'msdb_dbg_' prefixes corrected to 'msdb_dbo_' to
-- match the table names used elsewhere in this script; (2) the derived
-- tables A and B now alias their columns so the outer references
-- (A.database_name, B.backup_finish_date, ...) actually resolve.
SELECT A.[Server]
, A.database_name
, B.backup_start_date
, B.backup_finish_date
, CONVERT(varchar(12),DATEADD(ms,
DATEDIFF(ms,B.backup_start_date,B.backup_finish_date), 0), 114) AS BackupTime
, B.backup_size
--, B.expiration_date
--, B.logical_device_name
--, B.physical_device_name
--, B.backupset_name
FROM (
-- A: newest full ('D') backup date per database.
SELECT CONVERT(CHAR(100),SERVERPROPERTY('Servername')) AS Server
, msdb_dbo_backupset_database_name AS database_name
, MAX (msdb_dbo_backupset_backup_finish_date) AS last_db_backup_date
FROM msdb_dbo_backupmediafamily INNER
JOIN msdb_dbo_backupset
ON msdb_dbo_backupmediafamily_media_set_id = msdb_dbo_backupset_media_set_id
WHERE msdb_dbo_backupset_type = 'D'
GROUP BY msdb_dbo_backupset_database_name
) AS A
LEFT JOIN
(
-- B: full backup detail rows, matched back to A on server, database
-- and the newest finish date.
SELECT CONVERT(CHAR(100),SERVERPROPERTY('Servername')) AS Server
, msdb_dbo_backupset_database_name AS database_name
, msdb_dbo_backupset_backup_start_date AS backup_start_date
, msdb_dbo_backupset_backup_finish_date AS backup_finish_date
, msdb_dbo_backupset_expiration_date AS expiration_date
, msdb_dbo_backupset_backup_size AS backup_size
, msdb_dbo_backupmediafamily.logical_device_name
, msdb_dbo_backupmediafamily.physical_device_name
, msdb_dbo_backupset_name AS backupset_name
, msdb_dbo_backupset_description AS description
FROM msdb_dbo_backupmediafamily INNER
JOIN msdb_dbo_backupset
ON msdb_dbo_backupmediafamily_media_set_id = msdb_dbo_backupset_media_set_id
WHERE msdb_dbo_backupset_type = 'D') AS B
ON A.[server] = B.[server] AND A.[database_name] = B.[database_name] AND A.[last_db_backup_date] = B.[backup_finish_date]
ORDER BY backup_finish_date
-------------------------------------------------------------------------------------------
--Databases with data backup over 24 hours old
-- Part 1: databases whose newest full ('D') backup finished more than
-- 24 hours ago.
SELECT CONVERT(CHAR(100),SERVERPROPERTY('Servername')) AS Server
, msdb_dbo_backupset_database_name
, MAX (msdb_dbo_backupset_backup_finish_date) AS last_db_backup_date
, DATEDIFF(hh,
MAX(msdb_dbo_backupset_backup_finish_date),
GETDATE()) AS [Backup Age (Hours)]
FROM msdb_dbo_backupset
WHERE msdb_dbo_backupset_type = 'D'
GROUP BY msdb_dbo_backupset_database_name HAVING (MAX(msdb_dbo_backupset_backup_finish_date) < DATEADD(hh, -24,GETDATE()))
UNION
--Databases without any backup history
-- Part 2: databases with no backupset rows at all (anti-join via LEFT
-- JOIN ... IS NULL); age is the sentinel 9999 so they sort last.
SELECT CONVERT(CHAR(100),SERVERPROPERTY('Servername')) AS Server
, master_dbo_sysdatabases.NAME AS database_name
, NULL AS [Last Data Backup Date]
, 9999 AS [Backup Age (Hours)]
FROM master_dbo_sysdatabases LEFT
JOIN msdb_dbo_backupset
ON master_dbo_sysdatabases.name = msdb_dbo_backupset_database_name
WHERE msdb_dbo_backupset_database_name IS NULL
AND master_dbo_sysdatabases.name <> 'tempdb' ORDER BY msdb_dbo_backupset_database_name
\ No newline at end of file
-- Maryland historical and projected total personal income (thousands of
-- constant 2009 dollars), 1970-2040; one column per jurisdiction.
-- NOTE(review): the county columns are text rather than numeric --
-- presumably the source data contains formatting (commas) or blanks;
-- confirm before casting.
CREATE TABLE "maryland_historical_and_projected_total_personal_income_in_thousands_of_constant_2009_dollars_1970_2040" (
"date_created" text,
"year" real,
"maryland" text,
"allegany_county" text,
"anne_arundel_county" text,
"baltimore_city" text,
"baltimore_county" text,
"calvert_county" text,
"caroline_county" text,
"carroll_county" text,
"cecil_county" text,
"charles_county" text,
"dorchester_county" text,
"frederick_county" text,
"garrett_county" text,
"harford_county" text,
"howard_county" text,
"kent_county" text,
"montgomery_county" text,
"prince_george_s_county" text,
"queen_anne_s_county" text,
"somerset_county" text,
"st_mary_s_county" text,
"talbot_county" text,
"washington_county" text,
"wicomico_county" text,
"worcester_county" text
);
\ No newline at end of file
(function (factory) {
if (typeof module !== 'undefined' && typeof module.exports !== 'undefined' && typeof require !== 'undefined') {
// CommonJS
factory(module.exports, require('underscore-data'));
} else {
// running in browser
window.warehouse = window.warehouse || {};
factory(window.warehouse, _);
}
})(function(exports, _) {
function sqlEscapeIdentifier(value) {
// http://dev.mysql.com/doc/refman/5.0/en/identifiers.html
// http://www.sqlite.org/lang_keywords.html
return '`' + value.replace(/`/g, '``') + '`';
}
/*
 * following RQL to SQL conversion taken from:
 * http://github.com/persvr/perstore/blob/master/store/sql.js
 */
// Map of RQL comparison operator names to their SQL spellings; used by
// convertRql when rendering ne/le/ge/lt/gt/eq terms.
// NOTE(review): the "and"/"or" entries map to "&"/"|", but convertRql
// joins clauses with the conjunction *name* itself, so these two
// entries appear unused -- confirm before relying on them.
var sqlOperators = {
"and" : "&",
"or" : "|",
"eq" : "=",
"ne" : "!=",
"le" : "<=",
"ge" : ">=",
"lt" : "<",
"gt" : ">"
};
var valueToSql = function(value){
if(value instanceof Array){
return "(" + value.map(function(element){
return valueToSql(element);
}).join(",") + ")";
}
return typeof(value) == "string" ? "'" + value.replace(/'/g,"''") + "'" : value + '';
};
// Validate a column name: anything beyond word characters, '_' or '*'
// raises URIError. Returns the backtick-escaped form (several callers
// ignore the return value and use it purely as a validation check).
var safeSqlName = function(name){
    var illegal = /[^\w_*]/.test(name);
    if (illegal) {
        throw new URIError("Illegal column name " + name);
    }
    return sqlEscapeIdentifier(name);
};
var generateSql = function(structure){
var sql = "SELECT " + (structure.distinct ? 'DISTINCT ' : '') + structure.select + " FROM " + structure.from +
(structure.where && (" WHERE " + structure.where)) + (structure.order.length ? (" ORDER BY " + structure.order.join(", ")): "");
if (structure.groupBy) {
sql += " GROUP BY " + structure.groupBy;
}
if (structure.limit) {
sql += " LIMIT " + structure.limit + " OFFSET " + structure.offset;
}
return sql;
};
var generateSqlCount = function(structure){
return "SELECT COUNT(*) as count FROM " + structure.from +
(structure.where && (" WHERE " + structure.where));
};
// Convert an RQL query tree (bound as `this`) into a SQL SELECT string.
// options: { table, parameters } -- `parameters` is created if absent.
// Throws URIError on illegal names or unsupported operators.
// NOTE(review): `params` is captured but never written in this excerpt,
// and the one/first/aggregate postHandlers reference `when` and
// `results.rows`, neither of which is defined here -- presumably
// supplied by the calling store; confirm against the full module.
var toSQL = function(options) {
options = options || {};
var query = this;
var limit, count, offset, postHandler, results = true;
var where = "";
var select = [];
var distinct = false;
var order = [], groupBy = '';
var params = (options.parameters = options.parameters || []);
// Recursively walk the term tree, accumulating WHERE / ORDER BY /
// SELECT pieces in the closure variables above. `conjunction` is the
// parent node's name ("and"/"or") and is spliced between clauses.
function convertRql(query){
var conjunction = query.name;
query.args.forEach(function(term, index){
var column = term.args[0];
switch(term.name){
case "eq":
// eq with an array argument is treated as IN; scalar eq falls
// through to the generic comparison branch below.
if(term.args[1] instanceof Array){
if(term.args[1].length === 0){
// an empty IN clause is considered invalid SQL
if(index > 0){
where += " " + conjunction + " ";
}
where += "0=1";
}
else{
safeSqlName(column);
addClause(column + " IN " + valueToSql(term.args[1]));
}
break;
}
// else fall through
case "ne": case "lt": case "le": case "gt": case "ge":
// Generic comparison: table.column <op> literal.
safeSqlName(column);
addClause(options.table + '.' + column + sqlOperators[term.name] + valueToSql(term.args[1]));
break;
case "sort":
// Each sort attribute may carry a +/- prefix for direction.
if(term.args.length === 0)
throw new URIError("Must specify a sort criteria");
term.args.forEach(function(sortAttribute){
var firstChar = sortAttribute.charAt(0);
var orderDir = "ASC";
if(firstChar == "-" || firstChar == "+"){
if(firstChar == "-"){
orderDir = "DESC";
}
sortAttribute = sortAttribute.substring(1);
}
safeSqlName(sortAttribute);
order.push(options.table + "." + sortAttribute + " " + orderDir);
});
break;
case "and": case "or":
// Nested boolean group: parenthesise and recurse.
addClause("(");
convertRql(term);
where += ")";
break;
case "in":
//print("in() is deprecated");
if(term.args[1].length === 0){
// an empty IN clause is considered invalid SQL
if(index > 0){
where += " " + conjunction + " ";
}
where += "0=1";
}
else{
safeSqlName(column);
addClause(column + " IN " + valueToSql(term.args[1]));
}
break;
case "out":
//print("in() is deprecated");
if(term.args[1].length === 0){
// an empty IN clause is considered invalid SQL
if(index > 0){
where += " " + conjunction + " ";
}
where += "0=1";
}
else{
safeSqlName(column);
addClause(column + " NOT IN " + valueToSql(term.args[1]));
}
break;
case "select":
// Restrict the projection to the named (validated) columns.
term.args.forEach(safeSqlName);
select = select.concat(term.args);
break;
case "distinct":
distinct = true;
break;
case "count":
// COUNT queries suppress row results; the count is returned by
// the post handler instead.
count = true;
results = false;
postHandler = function(){
return count;
};
break;
case "one": case "first":
// "one" fetches 2 rows so that finding a second row can be
// reported as an error; "first" fetches exactly 1.
limit = term.name == "one" ? 2 : 1;
postHandler = function(){
var firstRow;
return when(results.rows.some(function(row){
if(firstRow){
throw new TypeError("More than one object found");
}
firstRow = row;
}), function(){
return firstRow;
});
};
break;
case "limit":
limit = term.args[0];
offset = term.args[1];
count = term.args[2] > limit;
break;
case "aggregate":
// aggregate(groupColumn, fn(column)): set GROUP BY, then rewrite
// this term as the inner aggregate and fall through to render it.
groupBy = column;
safeSqlName(groupBy);
column = term.args[1].args[0];
term.name = term.args[1].name;
// break is intentionally missing
case "mean":
// SQL spells mean as AVG; fall through to the aggregate branch.
term.name = "avg";
case "sum": case "max": case "min":
select.push(term.name + "(" + safeSqlName(column) + ") as value");
postHandler = function(){
var firstRow;
return when(results.rows.some(function(row){
firstRow = row;
}), function(){
return firstRow.value;
});
};
break;
case "search":
// MySQL full-text search.
safeSqlName(column);
addClause("MATCH (" + column + ") AGAINST (" + valueToSql(term.args[1]) + " IN BOOLEAN MODE)");
break;
default:
throw new URIError("Invalid query syntax, " + term.name+ " not implemented");
}
// Append a clause, inserting the conjunction unless this is the
// first clause or we just opened a parenthesised group.
function addClause(sqlClause){
if(where && !where.match(/\(\s*$/)){
where += " " + conjunction + " ";
}
where += sqlClause;
}
});
}
convertRql(query);
// Collected pieces for the SQL generators; '*' when no projection
// was specified.
var structure = {
select: select.length > 0 ? select.join(',') : '*',
distinct: distinct,
from: options.table,
where: where,
groupBy: groupBy,
order: order,
limit: limit,
offset: offset || 0
};
var sql;
if(count){
sql = generateSqlCount(structure);
} else {
sql = generateSql(structure);
}
return sql;
};
// Public entry point: render an RQL query object to SQL by invoking
// toSQL with `query` bound as the receiver.
var rql2sql = function(query, options) {
    return toSQL.apply(query, [options]);
};
exports.rql2sql = rql2sql;
exports.sqlEscapeIdentifier = sqlEscapeIdentifier;
});
\ No newline at end of file