/********** Learning Method **************/
% There are quite a few possible combinations of the settings below.
set(learning_mode,iterative_covering).
%set(learning_mode,generalAll_thenSelection).

/********** single-seed or compound-seed? ************/
set(generalization_method,solo_generalization).	set(minimumNum,0).
%set(generalization_method,co_generalization).		 set(minimumNum,1). % number of minimums; if not set, then default.



/********** Selection Method ************/
set(hypotheses_SelectionMethod,coveringStyle). % the basic covering algorithm 


/********* Cross Validation *********/
set(cross_validation_method,leave_one_out).
%set(cross_validation_method,nonSingleEx_fold). % provide fold information. 

set(ignore_predictDefault,yes).
%set(ignore_defaultPredicted,no). % If you choose this one, you need to add back those default-predicted examples; the 'yes' setting saves the time of proving those already predicted by default. NOTE(review): flag name here differs from ignore_predictDefault above — confirm which spelling the code actually checks.


% automatically divide into 10 folds, or manually divide.


%set(default_evaluation,yes).

%set(incremental_NTBound,yes).

% it_cover(+PosEIs,+NegEIs,-HypothesisTI)
% Iterative-deepening wrapper around cover/5, driven by the dynamic
% nt_bound/1 counter (seeded by the caller, see learn/3): attempt to
% cover under the current bound; on success the bound fact is retracted
% as cleanup, on failure the bound is incremented and the whole search
% is retried.
it_cover(PosEIs,NegEIs,HypothesisTI):-
	(cover(PosEIs,NegEIs,[s],[],HypothesisTI)-> %[s] is initiation of NTsSoFar
		retract(nt_bound(X)); % success: remove the bound fact
		retract(nt_bound(X)),NX is X+1,asserta(nt_bound(NX)), % failure: raise bound, retry
		it_cover(PosEIs,NegEIs,HypothesisTI)
	).


% learn(+PosEIs0,+NegEIs0,-HypothesisTI)
% Iterative covering with an incrementally growing non-terminal bound:
% examples are reordered by length, the bound is initialised to 1 and
% it_cover/3 deepens it as needed.
learn(Pos0,Neg0,HypothesisTI):-
	set(incremental_NTBound,yes),
	set(learning_mode,iterative_covering),
	!,
	reorderEx_byLength(Pos0,Positives),
	reorderEx_byLength(Neg0,Negatives),
	asserta(nt_bound(1)),
	it_cover(Positives,Negatives,HypothesisTI).



% This is what was used in the preceding version: plain iterative
% covering with a fixed non-terminal bound.
learn(Pos0,Neg0,HypothesisTI):-
	set(learning_mode,iterative_covering),
	!,
	reorderEx_byLength(Pos0,Positives),
	reorderEx_byLength(Neg0,Negatives),
	cover(Positives,Negatives,[s],[],HypothesisTI).

/*	(set(keep_ex_originalOrdering,yes)->
		PosEIs=PosEIs0;
		length(PosEIs0,Num_of_posEx), randseq(Num_of_posEx,Num_of_posEx,Indexes),maplist(indexMapping(PosEIs0),Indexes,PosEIs)
	),*/
/*	reorderEx_byLength(PosEIs0,PosEIs),
	write('% Example Order'),write(PosEIs),nl,
	cover(PosEIs,NegEIs,[],HypothesisTI).
	%print_list(HypothesisTI). %tInterpreter(HypothesisTI,Hypothesis).
*/

% Generate-all-then-select mode: every candidate hypothesis is written
% to 'candidateH.pl', consulted back in, and a global covering pass
% picks the final set.  Negative examples are not used so far.
learn(Positives,Negatives,Hypothesis):-
	set(learning_mode,generalAll_thenSelection),
	!,
	tell('candidateH.pl'),
	genAll(Positives),
	told,
	consult('candidateH.pl'),
	index(HIs),
	global_covering(Positives,Negatives,HIs,[],Hypothesis).
	%tInterpreter(HypothesisTI,Hypothesis).
	% indentifySharedClauses(PosEIs).

/*
	prefixTreeBuilder(PosEIs),
	claNode(startNode,StartNodeCoverage),
	%tell('tomato_hypothesisSelection_moduleGeneralization.txt'),
	hypothesisSelection(startNode-StartNodeCoverage,Hypothesis),
	nl.
*/




% run/0: main entry point driven by the command-line arguments
% (second argument is the file to consult).  Optionally strips the test
% examples first (etoB setting), then generates candidate hypotheses,
% identifies shared ones and emits the C output.
run:-
	yap_flag(argv,[_InputFile1,InputFile2]),
	consult(InputFile2),
	training_examples(TrainEIs),%write(TrainEIs),
	test_examples(TestEIs),
	(set(etoB,yes)->
		maplist(removeTestEI,TestEIs,_RemovedEIsDetail)
	;	true	% do nothing (was the non-idiomatic `Foo=0`)
	),
	gen_output_readIn_candiateH(TrainEIs),
	indentifySharedHs(TrainEIs,T_EITIs),
	outputToC(TrainEIs,T_EITIs).

% Ideally, you need to remove the held-out data when learning. (NOTE(review): the original comment is truncated here — "but since you" — intent unconfirmed.)

% run/0 (fallback clause): consult the input file and generalise
% examples 1..25 without any selection step.
run:-
	yap_flag(argv,[_InputFile1,InputFile2]),
	consult(InputFile2),
	numbersList(1,25,ExampleIs),
	genAll(ExampleIs).

% l/0: quick local experiment — learn from positive example ids 1..25
% against negative ids 100..125 and print the resulting theory.
l:-
	numbersList(1,25,Positives),
	%Negatives=[], % alternative: run with no negative examples
	numbersList(100,125,Negatives),
	learn(Positives,Negatives,TheoryTI),
	write(TheoryTI).

/*
:-	yap_flag(argv,[InputFile1,InputFile2]),
 	consult(InputFile2),	
	findall(EIp,ex(EIp,Ep,1),PosEI0s), % full size learning
	findall(EIn,ex(EIn,En,0),NegEIs),
	length(NegEIs,NK),
	unCoverRecord(PosEI0s,PosEIs), % reduce = remove those already explained by the examples
	length(PosEIs,PK),
	append(PosEIs,NegEIs,AllEI),
	write({PK,NK}),write('Totally '),nl,
	cross_validation(AllEI,PredictiveAccurracy),
	write('Predictive Accuracy is'),write(PredictiveAccurracy),nl,
	nl.%told.
*/

%r:-	


% latestExperiment/0: run a single leave-one-out fold.  The test-example
% id arrives as the third command-line argument; the fold's predictive
% accuracy is written to 'oneSample_oneFold_PA.txt'.  An id that matches
% no loaded example scores 100.0 by convention.
latestExperiment:-
	yap_flag(argv,[_InputFile1,InputFile2,TestID0]),
	letter_to_number(TestID0,TestID),
	consult(InputFile2),
	findall(PosI,ex(PosI,_PEx,1),PosEI0s), % full size learning
	findall(NegI,ex(NegI,_NEx,0),NegEIs),
	length(NegEIs,NK),
	unCoverRecord(PosEI0s,PosEIs), % drop positives already explained by the examples
	length(PosEIs,PK),
	append(PosEIs,NegEIs,AllEI),
	write('%'),write({PK,NK}),write('Totally '),nl,
	(	(member(TestID,PosEIs);member(TestID,NegEIs))
	->	oneTest(AllEI,TestID,TestResult)
	;	TestResult=100.0
	),
	tell('oneSample_oneFold_PA.txt'),
	portray_clause(oneFoldPA(TestResult)),nl,told,nl,
	nl.


% time and space
% ts/0: time the learning run.  The cpu time taken is written to
% 'oneSample_time.txt' and the learned theory is printed to stdout.
ts:-
	%yap_flag(argv,[InputFile1,InputFile2]),
	%consult(InputFile2),
	findall(PosI,ex(PosI,_PEx,1),PosEI0s), % full size learning
	findall(NegI,ex(NegI,_NEx,0),NegEIs),
	length(NegEIs,NK),
	unCoverRecord(PosEI0s,PosEIs), % drop positives already explained by the examples
	length(PosEIs,PK),
	append(PosEIs,NegEIs,_AllEI),
	write('%'),write({PK,NK}),write('Totally '),nl,
	statistics(cputime,[_Total1,_Previous]), % reset the per-call timer
	learn(PosEIs,NegEIs,FinalTI),
	statistics(cputime,[_Total2,TimeTaken]), % ms since previous call
	tell('oneSample_time.txt'),
	write(TimeTaken),write(' '),
	told,
	tInterpreter(FinalTI,FinalT),
	print_list(FinalT),
	nl,nl.


/* generalise:-
	findall(EIp,ex(EIp,Ep,1),PosEIs), % full size learning
	findall(EIn,ex(EIn,En,0),NegEIs),
	statistics(cputime,[Total1,Previous]),
	coGeneralization(PosEIs,NegEIs,FinalTI),
tInterpreter(FinalTI,FinalT),
nl,nl,write('---- Suggested Hypothesis ----'),nl,
	writeTheory(FinalT),nl,nl,

	statistics(cputime,[Total2,TimeTaken]),
	write('Total Time Taken is '), write(TimeTaken).*/


% ggg/0: shorthand entry point; simply delegates to generalise/0.
ggg:- generalise.

% generalise/0: learn a theory from every ex/3 fact currently loaded
% (label 1 = positive, 0 = negative) and pretty-print the result.
generalise:-
	findall(I,ex(I,_PEx,1),Positives),
	findall(J,ex(J,_NEx,0),Negatives),
	learn(Positives,Negatives,TheoryTI),
	tInterpreter(TheoryTI,Theory),
	nl,nl,write('---- Suggested Hypothesis ----'),nl,
	writeTheory(Theory).

