package felix.main;

import java.io.File;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import tuffy.db.RDB;
import tuffy.db.SQLMan;
import tuffy.mln.Clause;
import tuffy.mln.Predicate;
import tuffy.mln.Type;
import tuffy.ra.ConjunctiveQuery;
import tuffy.util.Config;
import tuffy.util.ExceptionMan;
import tuffy.util.FileMan;
import tuffy.util.StringMan;
import tuffy.util.Timer;
import tuffy.util.UIMan;
import felix.compiler.StaticAnalyzer;
import felix.dstruct.ExecutionPlan;
import felix.dstruct.FelixPredicate;
import felix.dstruct.FelixQuery;
import felix.executor.DDExecutor;
import felix.executor.Executor;
import felix.io.HadoopPostgreSQLPopulator;
import felix.io.TestHadoop;
import felix.optimizer.Scheduler;
import felix.parser.FelixCommandOptions;
import felix.util.FelixConfig;
import felix.util.FelixUIMan;

/**
 * Felix, an operator-based statistical inference system.
 * @author Ce Zhang
 *
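 * <p>A minimal usage sketch (assuming the command-line options have already
 * been parsed into a {@link FelixCommandOptions} object by the caller; the
 * construction below is hypothetical):
 * <pre>{@code
 * FelixCommandOptions opt = parseOptions(args); // hypothetical helper
 * new Felix().run(opt);
 * }</pre>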
 */
public class Felix {

	/**
	 * Felix's query, which consists of the program, the query, and the evidence.
	 */
	public FelixQuery fq;

	/**
	 * Static analyzer, which infers the properties of each predicate from the rules.
	 */
	protected StaticAnalyzer sa;

	/**
	 * Database connection.
	 */
	public static RDB db;

	/**
	 * Command line options.
	 */
	public FelixCommandOptions options;

	/**
	 * Whether this Felix run has loaded the evidence.
	 */
	public boolean hasLoadedEvidence = false;

	/**
	 * Returns Felix's query.
	 * @return the query held by this Felix object
	 */
	public FelixQuery getFelixQuery(){
		return fq;
	}

	/**
	 * Clear static parameters and initialize variables.
	 */
	public void resetACoupleAuxDataStructures(){

		ConjunctiveQuery.clearIndexHistory();

		Clause.mappingFromID2Const = new HashMap<Integer, String>();
		Clause.mappingFromID2Desc = new HashMap<String, String>();

	}

	/**
	 * Loads the Felix query from the program and query files. If -useEvid
	 * is given, the evidence files are also loaded by this function.
	 * @return the parsed query
	 * @throws InterruptedException
	 */
	public FelixQuery parseFelixQuery() throws InterruptedException{

		FelixUIMan.println(">>> Connecting to RDBMS at " + FelixConfig.db_url);

		if(FelixConfig.evidDBSchema != null){
			FelixConfig.db_schema = FelixConfig.evidDBSchema;
		}

		db = RDB.getRDBbyConfig();
		db.resetSchema(FelixConfig.db_schema);
		db.schema = FelixConfig.db_schema;

		FelixQuery fq = new FelixQuery();

		String[] progFiles = options.fprog.split(",");
		fq.loadPrograms(progFiles);

		if(options.fquery != null){
			String[] queryFiles = options.fquery.split(",");
			fq.loadQueries(queryFiles);
		}

		if(options.queryAtoms != null){
			FelixUIMan.println(">>> Parsing query atoms in command line");
			fq.parseQueryCommaList(options.queryAtoms);
		}

		if(options.cwaPreds != null){
			String[] preds = options.cwaPreds.split(",");
			for(String ps : preds){
				Predicate p = fq.getPredByName(ps);
				if(p == null){
					fq.closeFiles();
					ExceptionMan.die("COMMAND LINE: Unknown predicate name -- " + ps);
				}else{
					p.setClosedWorld(true);
				}
			}
		}

		for(Predicate p : fq.getAllPred()){
			p.prepareDB(db);
		}

		// Whether we use the evidence for the static analyzer. If we do not
		// load the evidence here, it will be loaded in Scheduler.schedule().
		if(options.useEvid){
			this.loadEvidence();
		}
		return fq;
	}

	/**
	 * Normalizes a script string: strips a leading tab, a leading
	 * {@code <~hdfs~} marker, and tabs that follow newlines.
	 * @param script
	 * @return the normalized string
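	 * <p>For example (a sketch of the regexes' effect):
	 * <pre>{@code
	 * normalizeScript("\t<~hdfs~ /user/felix/input.txt"); // => "/user/felix/input.txt"
	 * }</pre>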
	 */
	public String normalizeScript(String script){
		String rs = "";
		rs = script.replaceAll("^\t", "");
		rs = rs.replaceAll("^(\\t|\\s)*<~hdfs~(\\t|\\s)*", "");
		rs = rs.replaceAll("\n\t", "\n");
		return rs;
	}

	/**
	 * Returns a list of {@code n} copies of the SQL type name "TEXT".
	 * @param n size of the ArrayList
	 * @return ArrayList of "TEXT" entries
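	 * <p>For example, {@code getAllTextArray(3)} yields a list of three
	 * "TEXT" entries, one SQL column type per predicate argument.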
	 */
	public ArrayList<String> getAllTextArray(int n){
		ArrayList<String> rs = new ArrayList<String>();
		for(int i=0;i<n;i++){
			rs.add("TEXT");
		}
		return rs;
	}

	/**
	 * Looks up the table name recorded in the meta table for the given signature.
	 * @param metaTable name of the meta table to query
	 * @param signature signature string to look up
	 * @return the table name, or null if no entry exists
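	 * <p>A usage sketch (the meta-table name "_fe_meta" is illustrative):
	 * <pre>{@code
	 * String cached = giveMeTableNameIfExist("_fe_meta", signature);
	 * if (cached != null) { rsTableName = cached; } // reuse the cached table
	 * }</pre>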
	 */
	public String giveMeTableNameIfExist(String metaTable, String signature){
		String ret = null;

		RDB db = RDB.getRDBbyConfig(FelixConfig.auxSchema);

		ResultSet rs = db.query("SELECT * FROM " + metaTable + " WHERE signature='" + signature + "'");

		try {
			// take the first match, if any, and release the result set
			if(rs.next()){
				ret = rs.getString(2);
			}
			rs.close();
		} catch (SQLException e) {
			e.printStackTrace();
		}

		db.close();
		return ret;
	}

	/**
	 * Generates a signature string for the feature-extraction scripts of a
	 * predicate; the signature is used to decide whether previously extracted
	 * results can be reused.
	 * @deprecated
	 * @param p
	 * @return the signature string
	 */
	public String generateFESignature(FelixPredicate p){

		// debug output of the script components that form the signature
		System.err.println(p.mapinputvar);
		System.err.println(p.reduceinputkeyvar);
		System.err.println(p.reduceinputvaluesvar);
		System.err.println(p.dependencyFile);
		System.err.println(p.mapScript);
		System.err.println(p.reduceScript);
		System.err.println(p.mapinitScript);
		System.err.println(p.xmltag);

		String ret = "";
		ret += p + "|||||" + StringMan.commaList(p.getArgs()) + "|||||";
		ret += p.mapinputvar + "|||||";
		ret += p.mapinitScript + "|||||";
		ret += p.reduceinputkeyvar + "|||||";
		ret += p.reduceinputvaluesvar + "|||||";
		ret += p.dependencyFile + "|||||";
		ret += p.mapScript + "|||||";
		ret += p.reduceScript + "|||||";
		ret += p.xmltag + "|||||";

		// \s already covers tabs and newlines
		ret = ret.replaceAll("\\s|'", "\"");

		return ret;
	}

	/**
	 * Executor for the Blah Blah feature extraction language: runs Hadoop-based
	 * feature extraction for each predicate that requires it and dumps the
	 * results into per-predicate database tables.
	 */
	public void blahblahExecutor(){

		// TODO: the current assumption is that there are no cross-dependencies,
		// i.e., one JDBC predicate relies on exactly one Hadoop predicate
		ArrayList<FelixPredicate> trivialPlanner = new ArrayList<FelixPredicate>();
		for(FelixPredicate p : fq.getAllPred()){
			if(p.needExtractFeatures){
				if(p.dependencyFile != null){
					// Hadoop-fed predicates go first
					trivialPlanner.add(0, p);
				}else if(p.jdbcdep != null){
					// JDBC-dependent predicates go last, after their inputs
					trivialPlanner.add(p);
				}
			}
		}

		// first extract features
		for(FelixPredicate p : trivialPlanner){

			if(p.needExtractFeatures){

				if(FelixConfig.auxSchema == null){
					ExceptionMan.die("You must provide a schema for saving Hadoop result files using the -auxSchema option!");
				}

				String signature = this.generateFESignature(p);
				//String isInTable = this.giveMeTableNameIfExist(metaTable, signature);

				String rsTableName = "_fe_raw_" + p.getName();

				// disable re-use
				FelixConfig.forceExt = true;
				String isInTable = null;

				if(isInTable != null && !FelixConfig.forceExt){
					UIMan.println(">>> The feature extraction code for " + p +
							" is cached and does not need to be extracted again!");
					rsTableName = isInTable;

				}else{

					RDB db = RDB.getRDBbyConfig();
					db.dropTable(FelixConfig.auxSchema + "." + rsTableName);
					db.close();

					//db.execute("SET search_path = " + FelixConfig.auxSchema);
					//db.execute("DELETE FROM " + metaTable + " WHERE tableName='" + rsTableName + "'");
					//db.commit();
					//db.close();

					UIMan.println(">>> Extracting Features for predicate " + p + " using " + p.dependencyFile);

					String tmpFileName = "rsFileOnHadoop_" +
							(new Date()).getTime() +
							"_pred_" + p.getName();

					if(p.jdbcdep != null){

						String relyOn = normalizeScript(p.jdbcdep).replace("jdbc://", "");
						Pattern pp = Pattern.compile("(.*?)\\((.*?)\\)");
						Matcher m = pp.matcher(relyOn);
						if(!m.find()){
							ExceptionMan.die("Malformed jdbc:// dependency -- " + relyOn);
						}
						String pred = m.group(1);
						String[] variableName = m.group(2).split(",");

						String tableName = "_fe_raw_" + pred;
						String fileName = FelixConfig.hdfsServer + tmpFileName + "_ori_" + pred + "_for_" + p.getName();

						UIMan.println(">>> Dumping database table " + tableName + " to HDFS...");
						HadoopPostgreSQLPopulator.dumpTableToHDFS(FelixConfig.auxSchema,
								tableName,
								variableName.length,
								fileName);

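						// Argument layout consumed by TestHadoop.executeHadoopProgram
						// (inferred from the arrays below): [0] mode, [1] input path,
						// [2] output path, [3] map script, [4] reduce script,
						// [5] map input var, [6] reduce key var, [7] reduce values var,
						// [8] map init script, [9] reduce init script; "xml" mode
						// additionally passes the open and close tags.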
						String[] toPass = {
								"standard",
								normalizeScript(fileName),
								FelixConfig.hdfsServer + tmpFileName,
								normalizeScript(p.mapScript),
								normalizeScript(p.reduceScript),
								p.mapinputvar,
								p.reduceinputkeyvar,
								p.reduceinputvaluesvar,
								normalizeScript(p.mapinitScript),
								normalizeScript(p.reduceinitScript)
						};

						try {
							TestHadoop.executeHadoopProgram(toPass);
							//TestHadoop.post(toPass);
						} catch (Exception e) {
							e.printStackTrace();
							ExceptionMan.die("Hadoop Error!");
						}

					}else if(p.xmltag != null){

						String[] toPass = {
								"xml",
								normalizeScript(p.dependencyFile),
								FelixConfig.hdfsServer + tmpFileName,
								normalizeScript(p.mapScript),
								normalizeScript(p.reduceScript),
								p.mapinputvar,
								p.reduceinputkeyvar,
								p.reduceinputvaluesvar,
								normalizeScript(p.mapinitScript),
								normalizeScript(p.reduceinitScript),
								// note: the open tag omits the closing '>'
								// (so tags carrying attributes also match)
								"<" + p.xmltag,
								"</" + p.xmltag + ">"
						};

						try {
							TestHadoop.executeHadoopProgram(toPass);
							//TestHadoop.post(toPass);
						} catch (Exception e) {
							e.printStackTrace();
							ExceptionMan.die("Hadoop Error!");
						}

					}else{

						String[] toPass = {
								"standard",
								normalizeScript(p.dependencyFile),
								FelixConfig.hdfsServer + tmpFileName,
								normalizeScript(p.mapScript),
								normalizeScript(p.reduceScript),
								p.mapinputvar,
								p.reduceinputkeyvar,
								p.reduceinputvaluesvar,
								normalizeScript(p.mapinitScript),
								normalizeScript(p.reduceinitScript)
						};

						try {
							TestHadoop.executeHadoopProgram(toPass);
							//TestHadoop.post(toPass);
						} catch (Exception e) {
							e.printStackTrace();
							ExceptionMan.die("Hadoop Error!");
						}

					}

					try {

						ArrayList<String> tableArgs = (ArrayList<String>) p.getArgs().clone();
						ArrayList<String> tableTypes = getAllTextArray(p.arity());

						if(p.getArgs().size() == 1){
							// pad unary relations with an extra dummy column before populating
							tableArgs.add("I_am_just_a_sad_meaningless_value");
							tableTypes.add("TEXT");
						}

						UIMan.verbose(0, ">>> Dump to database table for relation " + p.toString());
						HadoopPostgreSQLPopulator.createAndPopulateTableFromDir
								(FelixConfig.auxSchema, rsTableName,
								tableArgs, tableTypes,
								FelixConfig.hdfsServer + tmpFileName + "_dir", FelixConfig.nReduce);
					} catch (Exception e) {
						e.printStackTrace();
					}
				}

			}
		}

	}

	/**
	 * Loads evidence from files and/or database tables. When some predicates
	 * are populated by feature extraction or from database tables, the string
	 * evidence is first staged in temporary tables, constants are assigned IDs,
	 * and the result is then flushed into the predicate tables.
	 * @throws InterruptedException
	 */
	public void loadEvidence() throws InterruptedException{

		FelixConfig.mixturedLoading = false;

		boolean isOnlyFromFile = true;

		for(FelixPredicate fp : fq.getAllPred()){
			if(fp.needExtractFeatures || fp.loadFromDatabase){
				isOnlyFromFile = false;
				FelixConfig.mixturedLoading = true;
			}
		}

		if(options.fevid != null && isOnlyFromFile){
			String[] evidFiles = options.fevid.split(",");
			fq.loadEvidences(evidFiles);
			fq.materializeTables(db);
		}else{

			// mixed loading: stage everything in pure-string tables first
			FelixConfig.mixturedLoading = true;

			HashMap<FelixPredicate, String> strTableName =
					new HashMap<FelixPredicate, String>();

			// first, load from the evidence files
			if(options.fevid != null){
				String[] evidFiles = options.fevid.split(",");
				fq.loadEvidences(evidFiles);
			}
			for(FelixPredicate fp : fq.getAllPred()){
				if(!fp.needExtractFeatures && !fp.loadFromDatabase){
					fp.flushStrEvidence("_tmp_str_loading_" + fp.getName());
					strTableName.put(fp, "_tmp_str_loading_" + fp.getName());
				}
			}

			// second, from db table
			for(FelixPredicate fp : fq.getAllPred()){
				if(fp.loadFromDatabase){
					String rName = fp.loadingSchema + "." + fp.loadingTable;
					strTableName.put(fp, rName);
				}
			}

			// third, blahblah program
			this.blahblahExecutor();
			for(FelixPredicate fp : fq.getAllPred()){
				if(fp.needExtractFeatures){
					strTableName.put(fp, FelixConfig.auxSchema + "." + "_fe_raw_" + fp.getName());
				}
			}

			RDB db = RDB.getRDBbyConfig(Config.db_schema);
			db.dropSequence("_tmp_seq_id");
			db.execute("CREATE SEQUENCE _tmp_seq_id START " + (fq.mapIDConstant.size() + 1) + ";");
			// next, build the constant (type) tables
			HashMap<String, ArrayList<String>> typeDomain = new HashMap<String, ArrayList<String>>();
			// (a) flush the constants that already appear in the program
			for(FelixPredicate fp : fq.getAllPred()){
				for(int i=0;i<fp.arity();i++){
					Type type = fp.getTypeAt(i);
					String tName = type.name;
					if(type.isNonSymbolicType()){
						continue;
					}
					if(!typeDomain.containsKey(tName)){
						typeDomain.put(tName, new ArrayList<String>());

						String sql = "CREATE TABLE type_" + tName + " (constantid bigint, constantvalue TEXT); ";
						db.execute(sql);
						db.commit();

						for(Integer constant : type.getDomain()){

							sql = "INSERT INTO type_" + tName + " VALUES ("
									+ constant + ", " + SQLMan.escapeString(fq.mapIDConstant.get(constant))
									+ ")";
							db.execute(sql);

						}

					}
				}
			}


			// (b) collect the constants appearing in the staged evidence tables
			for(FelixPredicate fp : fq.getAllPred()){

				if(!strTableName.containsKey(fp)){
					continue;
				}

				for(int i=0;i<fp.arity();i++){
					Type type = fp.getTypeAt(i);
					if(type.isNonSymbolicType()){
						continue;
					}
					String tName = type.name;
					if(!typeDomain.containsKey(tName)){
						typeDomain.put(tName, new ArrayList<String>());
					}
					typeDomain.get(tName).add("SELECT " + fp.getArgs().get(i)
							+ " constantVALUE FROM " + strTableName.get(fp));
				}
			}

			ArrayList<String> allTypeSQLs = new ArrayList<String>();
			for(String tName : typeDomain.keySet()){
				if(typeDomain.get(tName).size() == 0){
					continue;
				}
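				// Assign fresh IDs to constants seen only in the evidence;
				// schematically, for a type T:
				//   INSERT INTO type_T
				//   SELECT nextval('_tmp_seq_id'), constantVALUE
				//   FROM (SELECT DISTINCT constantVALUE FROM (<union of staged columns>) nt
				//         WHERE constantVALUE NOT IN (SELECT constantvalue FROM type_T)) wt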
				String sql = "INSERT INTO type_" + tName + " " +
						"SELECT nextval('_tmp_seq_id') constantID, constantVALUE FROM " +
						"(SELECT DISTINCT constantVALUE FROM " +
						"(" + StringMan.join(" UNION ALL ", typeDomain.get(tName)) + " ) nt " +
						" WHERE constantVALUE NOT IN (SELECT constantvalue FROM type_" + tName + ")" +
						") wt";
				allTypeSQLs.add("SELECT * FROM type_" + tName);
				UIMan.print(">>> Populating table for type " + tName + "...");
				db.execute(sql);

				UIMan.print("*");
				sql = "CREATE INDEX _idx_type_id" + tName + " on type_" + tName + " (constantID) ";
				db.dropIndex("_idx_type_id" + tName);
				db.execute(sql);
				db.analyze("type_" + tName);
				UIMan.println("");
			}

			String crel = Config.relConstants;
			db.dropView(crel);
			db.dropTable(crel);
			String sql = "CREATE VIEW " + crel +
					" AS SELECT constantID::bigint id, constantVALUE string FROM (" +
					StringMan.join(" UNION ALL ", allTypeSQLs) + " ) nt";
			db.execute(sql);

			// finally, flush the staged evidence into the predicate tables
			for(FelixPredicate fp : fq.getAllPred()){
				String rawTable = strTableName.get(fp);
				String currentTable = strTableName.get(fp);

				UIMan.print(">>> Create ID table for " + fp);

				// first, join the string table with the constant map
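				// schematically, for each symbolic column c of the predicate:
				//   CREATE TABLE <raw>_ser_c AS
				//   SELECT t0.truth, t0.prior, ..., t1.constantid::bigint c, ...
				//   FROM <currentTable> t0, type_<T> t1
				//   WHERE t0.c = t1.constantvalue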
				for(int i=0 ; i<fp.arity(); i++){
					Type type = fp.getTypeAt(i);

					if(type.isNonSymbolicType()){
						continue;
					}

					String typeTable = "type_" + type.name;

					String column = fp.getArgs().get(i);

					String newTable = rawTable + "_ser_" + column;

					ArrayList<String> selList = new ArrayList<String>();
					selList.add("t0.truth");
					selList.add("t0.prior");
					for(int j=0 ; j<fp.arity(); j++){
						if(j==i){
							selList.add("t1.constantid::bigint " + fp.getArgs().get(j));
						}else{
							selList.add("t0." + fp.getArgs().get(j));
						}
					}

					db.dropTable(newTable);
					db.dropView(newTable);
					sql = "CREATE TABLE " + newTable + " AS SELECT " + StringMan.commaList(selList) + " FROM " +
							currentTable + " t0, " + typeTable + " t1" + " WHERE " +
							"t0." + column + "=" + "t1.constantvalue";

					UIMan.print(".");
					db.execute(sql);
					currentTable = newTable;
				}

				// second, copy to the pred_* table
				ArrayList<String> insertList = new ArrayList<String>();
				ArrayList<String> selList = new ArrayList<String>();

				//insertList.add("truth");
				//insertList.add("prior");
				//insertList.add("club");

				db.dropSequence("_tmp_seq_id_" + fp.getName());
				db.execute("CREATE SEQUENCE _tmp_seq_id_" + fp.getName() + " START 1;");

				//selList.add("nextval('_tmp_seq_id_" + fp.getName()+"')");
				//selList.add("NULL::bigint");
				selList.add("truth::Bool");
				selList.add("prior::Float");
				selList.add("2::INT");
				selList.add("NULL::INT");
				for(int i=0 ; i<fp.arity(); i++){

					Type type = fp.getTypeAt(i);
					if(type.isNonSymbolicType()){
						insertList.add(fp.getArgs().get(i) + "");
						selList.add(fp.getArgs().get(i) + "::" + type.getNonSymbolicTypeInSQL());
					}else{
						insertList.add(fp.getArgs().get(i) + "");
						selList.add(fp.getArgs().get(i) + "::bigint");
					}
				}

				/*
				sql = "INSERT INTO " + fp.getRelName() +
					" ( " + StringMan.commaList(insertList) + " ) " +
					" SELECT " + StringMan.commaList(selList) + " FROM " + currentTable;
				*/

				//sql += "id SERIAL PRIMARY KEY,\n";
				//sql += "truth BOOL,\n";
				//sql += "prior FLOAT,\n";
				//sql += "club INT DEFAULT 0,\n";
				//sql += "atomID INT DEFAULT NULL,\n";

				//db.dropTable(fp.getRelName());

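				// schematically:
				//   INSERT INTO pred_<name>(truth, prior, club, atomID, c1, ..., ck)
				//   SELECT truth::Bool, prior::Float, 2::INT, NULL::INT,
				//          c1::bigint, ..., ck::<sql type>
				//   FROM <currentTable>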
				sql = "INSERT INTO " + fp.getRelName() + "(truth, " +
						"prior, club, atomID, " +
						StringMan.commaList(insertList) + " ) " +
						" SELECT " + StringMan.commaList(selList)
						+ " FROM " + currentTable;

				UIMan.print("*");
				db.execute(sql);
				db.commit();

				UIMan.println("");
			}

			db.close();

		}

		fq.executeAllDatalogRules(db);

		hasLoadedEvidence = true;

	}

	/**
	 * Clean up temporary data: the schema in PostgreSQL and the working directory.
	 */
	protected void cleanUp(){
		Config.exiting_mode = true;
		UIMan.println(">>> Cleaning up temporary data");
		if(!Config.keep_db_data){
			UIMan.print(" Removing database schema '" + Config.db_schema + "'...");
			UIMan.println(db.dropSchema(Config.db_schema)?"OK" : "FAILED");
		}else{
			UIMan.println(" Data remains in schema '" + Config.db_schema + "'.");
		}
		db.close();

		UIMan.print(" Removing temporary dir '" + Config.getWorkingDir() + "'...");
		UIMan.println(FileMan.removeDirectory(new File(Config.getWorkingDir()))?"OK" : "FAILED");

		UIMan.println("*** Felix exited at " + Timer.getDateTime() + " after running for " + Timer.elapsed());
		UIMan.closeDribbleFile();

		for(RDB db : RDB.historyInstances){
			db.close();
		}

		System.exit(0);

	}

	/**
	 * Run Felix!
	 * @param opt Command line options.
	 * @throws InterruptedException
	 */
	public void run(FelixCommandOptions opt) throws InterruptedException{

		Timer.start("Felix-Timer");
		this.options = opt;
		resetACoupleAuxDataStructures();

		FelixConfig.allRuleAsMLN = this.options.allRuleAsMLN;
		FelixConfig.allView = this.options.allView;
		FelixConfig.allMat = this.options.allMat;
		FelixConfig.hadoopLocal = this.options.local;
		FelixConfig.auxSchema = this.options.auxSchema;
		FelixConfig.forceExt = this.options.forceExt;
		FelixConfig.nReduce = this.options.nReduce;

		FelixConfig.hdfsServer = this.options.hdfs;
		FelixConfig.mrServer = this.options.mapreduce;

		FelixConfig.nDDIT = this.options.nDD;

		fq = this.parseFelixQuery();

		sa = new StaticAnalyzer(this.fq, options);
		sa.parse();

		Scheduler sc = new Scheduler(this, this.fq, options);
		ExecutionPlan ep = sc.schedule();

		if(options.useDualDecomposition){
			DDExecutor ec = new DDExecutor(ep, fq, options);
			ec.run();
		}else{
			Executor ec = new Executor(ep, fq, options);
			ec.run();
		}

		cleanUp();
	}

}