// Example: compute approximate decision reducts on the Golf benchmark
// and print the attribute set of every reduct found.

// Load data.
var data = Data.Benchmark.Factory.Golf();

// Set parameters for the reduct factory.
var parm = new Args();
parm.SetParameter(ReductFactoryOptions.DecisionTable, data);
parm.SetParameter(ReductFactoryOptions.ReductType, ReductTypes.ApproximateDecisionReduct);
parm.SetParameter(ReductFactoryOptions.FMeasure, (FMeasure)FMeasures.Majority);
// Epsilon controls the allowed approximation (5% here).
parm.SetParameter(ReductFactoryOptions.Epsilon, 0.05);

// Compute reducts.
var reducts = ReductFactory.GetReductGenerator(parm).GetReducts();

// Output each reduct's attributes.
foreach (IReduct reduct in reducts)
{
    Console.WriteLine(reduct.Attributes.ToArray().ToStr());
}
// Example: weighted approximate reducts on the Zoo benchmark,
// rule induction from the 10 shortest reducts, and accuracy on a hold-out set.

// Load benchmark data.
var data = Data.Benchmark.Factory.Zoo();

// Set object weights using the r(u) (relative) weighting scheme.
data.SetWeights(new WeightGeneratorRelative(data).Weights);

// Split data into training (80%) and testing (20%) sets.
DecisionTable train, test;
var splitter = new DataSplitterRatio(data, 0.8);
splitter.Split(out train, out test);

// Set parameters for the reduct factory.
var parm = new Args();
parm.SetParameter(ReductFactoryOptions.DecisionTable, train);
parm.SetParameter(ReductFactoryOptions.ReductType, ReductTypes.ApproximateDecisionReduct);
parm.SetParameter(ReductFactoryOptions.FMeasure, (FMeasure)FMeasures.MajorityWeighted);
parm.SetParameter(ReductFactoryOptions.Epsilon, 0.05);

// Compute reducts.
var reductGenerator = ReductFactory.GetReductGenerator(parm);
var reducts = reductGenerator.GetReducts();

// Select 10 reducts with the least number of attributes.
// FIX: the original declared 'bestReduct' but referenced 'bestReducts' below.
var bestReducts = reducts.OrderBy(r => r.Attributes.Count).Take(10);

// Create decision rules based on the selected reducts.
var decisionRules = new ReductDecisionRules(bestReducts);

// When a test instance is not recognized, set output as unclassified.
decisionRules.DefaultOutput = null;

// Classify test data.
// FIX: 'Classifier.DefaultClassifer' was misspelled; use 'Classifier.Default'
// as in the other examples in this file.
// NOTE(review): other snippets call rules.Learn(train, ...) before Classify —
// confirm whether Classify trains the rule set implicitly here.
var result = Classifier.Default.Classify(decisionRules, test);

// Output accuracy.
Console.WriteLine("Accuracy: {0}", result.Accuracy);
// Example: generalized majority decision reducts on the DNA training set,
// computed from 10 random attribute permutations.

// Load training data set.
var train = Data.Benchmark.Factory.Dna();

// Setup reduct factory parameters.
Args parms = new Args();
parms.SetParameter(ReductFactoryOptions.DecisionTable, train);
parms.SetParameter(ReductFactoryOptions.ReductType, ReductTypes.GeneralizedMajorityDecision);
parms.SetParameter(ReductFactoryOptions.WeightGenerator, new WeightGeneratorMajority(train));
parms.SetParameter(ReductFactoryOptions.Epsilon, 0.05);
// 10 permutations over the standard (conditional) attributes.
parms.SetParameter(ReductFactoryOptions.PermutationCollection,
    new PermutationCollection(10, train.SelectAttributeIds(a => a.IsStandard).ToArray()));

// Generate reducts.
var reductGenerator = ReductFactory.GetReductGenerator(parms);
var reducts = reductGenerator.GetReducts();
// Example: generalized majority decision reducts with exception rules on DNA,
// rule induction, and classification of the test set.

// Load training and test data sets.
var train = Data.Benchmark.Factory.Dna();
var test = Data.Benchmark.Factory.DnaTest();

// Setup reduct factory parameters.
Args parms = new Args();
parms.SetParameter(ReductFactoryOptions.DecisionTable, train);
parms.SetParameter(ReductFactoryOptions.ReductType, ReductTypes.GeneralizedMajorityDecision);
parms.SetParameter(ReductFactoryOptions.WeightGenerator, new WeightGeneratorMajority(train));
parms.SetParameter(ReductFactoryOptions.Epsilon, 0.05);
parms.SetParameter(ReductFactoryOptions.PermutationCollection,
    new PermutationCollection(10, train.SelectAttributeIds(a => a.IsStandard).ToArray()));
parms.SetParameter(ReductFactoryOptions.UseExceptionRules, true);

// Generate reducts with exceptions.
var reductGenerator = ReductFactory.GetReductGenerator(parms);
var reducts = reductGenerator.GetReducts();

// Print the attributes and supported objects of every exception.
foreach (var reduct in reducts)
{
    var r = reduct as ReductWithExceptions;
    foreach (var exception in r.Exceptions)
    {
        Console.WriteLine(exception.Attributes.ToArray().ToStr());
        Console.WriteLine(exception.SupportedObjects.ToArray().ToStr());
    }
}

// Build decision rules from the reducts and train them.
var rules = new ReductDecisionRules(reducts);
rules.DecisionIdentificationMethod = RuleQualityMethods.Confidence;
rules.RuleVotingMethod = RuleQualityMethods.SingleVote;
rules.Learn(train, null);

// Classify test data set.
var result = Classifier.Default.Classify(rules, test);

// Show results.
Console.WriteLine(result);
// Example: supervised discretization of the Vehicle benchmark —
// fit cuts on the training split, then apply them to both splits.

// Load data and split into training (80%) and testing (20%) sets.
var data = Data.Benchmark.Factory.Vehicle();
DecisionTable train, test;
var splitter = new DataSplitterRatio(data, 0.8);
splitter.Split(out train, out test);

var tableDiscretizer = new TableDiscretizer(
    new IDiscretizer[]
    {
        // Try to discretize using the Fayyad MDL criterion first.
        new DiscretizeFayyad(),

        // In case Fayyad MDL is too strict,
        // use standard entropy with 5 buckets.
        new DiscretizeEntropy(5)
    });

// Only discretize standard attributes that are discretizable.
tableDiscretizer.FieldsToDiscretize =
    train.SelectAttributeIds(a => a.IsStandard && a.CanDiscretize());

var filter = new DiscretizeFilter();
filter.TableDiscretizer = tableDiscretizer;
filter.Compute(train);

// Report which discretizer was used and the cuts computed per attribute.
foreach (int attributeId in tableDiscretizer.FieldsToDiscretize)
{
    var fieldDiscretizer = filter.GetAttributeDiscretizer(attributeId);
    Console.WriteLine("Attribute {0} was discretized with {1}",
        attributeId, fieldDiscretizer.GetType().Name);
    Console.WriteLine("Computed Cuts: {0}", fieldDiscretizer.Cuts.ToStr());
}

// Apply the cuts learned on the training set to both splits.
var trainDisc = filter.Apply(train);
var testDisc = filter.Apply(test);
// Example: AdaBoost ensemble of reduct-based rule classifiers on DNA.

// Load training and testing DNA (species) data sets.
var train = Data.Benchmark.Factory.Dna();
var test = Data.Benchmark.Factory.DnaTest();

// Set uniform object weights (1/N each).
var weightGen = new WeightGeneratorConstant(train, 1.0 / (double)train.NumberOfRecords);
train.SetWeights(weightGen.Weights);

// Create parameters for the reduct factory: generate 100 reducts
// and keep the single best one by rule count.
var parm = new Args();
parm.SetParameter(ReductFactoryOptions.ReductType, ReductTypes.ApproximateDecisionReduct);
parm.SetParameter(ReductFactoryOptions.FMeasure, (FMeasure)FMeasures.MajorityWeighted);
parm.SetParameter(ReductFactoryOptions.Epsilon, 0.05);
parm.SetParameter(ReductFactoryOptions.NumberOfReducts, 100);
parm.SetParameter(ReductFactoryOptions.ReductComparer, ReductRuleNumberComparer.Default);
parm.SetParameter(ReductFactoryOptions.SelectTopReducts, 1);

// Create weak classifier prototype.
var prototype = new ReductDecisionRules();
prototype.ReductGeneratorArgs = parm;

// Create AdaBoost ensemble and train it on the standard attributes.
var adaBoost = new AdaBoost<ReductDecisionRules>(prototype);
adaBoost.Learn(train, train.SelectAttributeIds(a => a.IsStandard).ToArray());

// Classify test data set.
var result = Classifier.Default.Classify(adaBoost, test);

// Print result header & result.
Console.WriteLine(ClassificationResult.TableHeader());
Console.WriteLine(result);
// Example: AdaBoost ensemble of reduct-based rule classifiers on DNA.
// NOTE(review): this snippet is an exact duplicate of the preceding
// AdaBoost example — consider removing one copy.

// Load training and testing DNA (species) data sets.
var train = Data.Benchmark.Factory.Dna();
var test = Data.Benchmark.Factory.DnaTest();

// Set uniform object weights (1/N each).
var weightGen = new WeightGeneratorConstant(train, 1.0 / (double)train.NumberOfRecords);
train.SetWeights(weightGen.Weights);

// Create parameters for the reduct factory: generate 100 reducts
// and keep the single best one by rule count.
var parm = new Args();
parm.SetParameter(ReductFactoryOptions.ReductType, ReductTypes.ApproximateDecisionReduct);
parm.SetParameter(ReductFactoryOptions.FMeasure, (FMeasure)FMeasures.MajorityWeighted);
parm.SetParameter(ReductFactoryOptions.Epsilon, 0.05);
parm.SetParameter(ReductFactoryOptions.NumberOfReducts, 100);
parm.SetParameter(ReductFactoryOptions.ReductComparer, ReductRuleNumberComparer.Default);
parm.SetParameter(ReductFactoryOptions.SelectTopReducts, 1);

// Create weak classifier prototype.
var prototype = new ReductDecisionRules();
prototype.ReductGeneratorArgs = parm;

// Create AdaBoost ensemble and train it on the standard attributes.
var adaBoost = new AdaBoost<ReductDecisionRules>(prototype);
adaBoost.Learn(train, train.SelectAttributeIds(a => a.IsStandard).ToArray());

// Classify test data set.
var result = Classifier.Default.Classify(adaBoost, test);

// Print result header & result.
Console.WriteLine(ClassificationResult.TableHeader());
Console.WriteLine(result);
// Example: random forest of C4.5 trees on the German credit data set.

// Load data from a CSV file.
var data = DecisionTable.Load("german.data", FileFormat.CSV);

// Split into training (80%) and testing (20%) sets.
DecisionTable train, test;
var splitter = new DataSplitterRatio(data, 0.8);
splitter.Split(out train, out test);

// Initialize and learn a random forest of 500 C4.5 trees.
var forest = new DecisionForestRandom<DecisionTreeC45>();
forest.Size = 500;
forest.Learn(train, train.SelectAttributeIds(a => a.IsStandard).ToArray());

// Validate on the test data set.
var result = Classifier.Default.Classify(forest, test);

// Output the results.
Console.WriteLine(result);
// Example: 10-fold, 25-repeated cross validation of a C4.5 decision tree.

// Load data.
var data = DecisionTable.Load("data.txt", FileFormat.CSV);

// Create 10-fold, 25-repeated cross validation.
var cv = new CrossValidation(data, 10, 25);

// Create a C4.5 decision tree and run the CV evaluation.
var c45 = new DecisionTreeC45();
var result = cv.Run<DecisionTreeC45>(c45);

// Output result.
// NOTE(review): the label says "Train Error", but a cross-validation result
// normally reports the validation (held-out fold) error — confirm the label.
Console.WriteLine("Train Error: {0}", result.Error);