author:    leshe4ka46 <alex9102naid1@ya.ru>  2025-12-09 11:42:25 +0300
committer: leshe4ka46 <alex9102naid1@ya.ru>  2025-12-09 11:42:25 +0300
commit:    72b4edeadeafc9c54b3db9b0961a45da3d07b77c (patch)
tree:      fd472d1be885fc9856a3426e8aa794d9aee968c5
parent:    f60863aecfdfb2a7a35c9d2d4233142ca17c9152 (diff)
ar, tree
-rw-r--r--  R_AR/Rplots.pdf      bin  1627897 -> 149693 bytes
-rw-r--r--  R_AR/aaaaaaa.md      14
-rw-r--r--  R_AR/conviction.pdf  bin  0 -> 914847 bytes
-rw-r--r--  R_AR/image.png       bin  0 -> 933789 bytes
-rw-r--r--  R_AR/image3.png      bin  0 -> 190098 bytes
-rwxr-xr-x  R_AR/main.r          74
-rw-r--r--  R_Tree/.md           3
-rw-r--r--  R_Tree/Rplots.pdf    bin  0 -> 17450 bytes
-rwxr-xr-x  R_Tree/main.r        110
-rw-r--r--  R_Tree/survey.csv    751
10 files changed, 942 insertions, 10 deletions
diff --git a/R_AR/Rplots.pdf b/R_AR/Rplots.pdf
index e3c1f2f..c0df385 100644
--- a/R_AR/Rplots.pdf
+++ b/R_AR/Rplots.pdf
Binary files differ
diff --git a/R_AR/aaaaaaa.md b/R_AR/aaaaaaa.md
new file mode 100644
index 0000000..6e84df7
--- /dev/null
+++ b/R_AR/aaaaaaa.md
@@ -0,0 +1,14 @@
+support = count(x)/count(total)
+
+confidence - if x, then most likely y = supp(x + y)/supp(x)
+
+lift - how much x and y depend on each other = supp(x + y)/(supp(x) * supp(y))
+lift = 1 means they are independent
+
+lift(a, b) = 1.25 -> the rule is 25% stronger than a simply being bought on its own
+
+https://habr.com/ru/companies/ods/articles/353502/
+
+
+
+we drop the infrequent itemsets
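To sanity-check the formulas in these notes, here is a minimal sketch (not part of the commit) using arules on a made-up four-basket transaction set; the item names are purely illustrative:

library(arules)
trans <- as(list(
  c("bread", "milk"),
  c("bread", "butter"),
  c("bread", "milk", "butter"),
  c("milk")
), "transactions")
# support(x) = count(x) / count(total): bread appears in 3 of 4 baskets
itemFrequency(trans)["bread"]  # 0.75
# confidence and lift show up in the quality columns of the mined rules
r <- apriori(trans, parameter = list(supp = 0.25, conf = 0, minlen = 2))
inspect(subset(r, lhs %in% "bread" & rhs %in% "milk"))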
diff --git a/R_AR/conviction.pdf b/R_AR/conviction.pdf
new file mode 100644
index 0000000..779a04e
--- /dev/null
+++ b/R_AR/conviction.pdf
Binary files differ
diff --git a/R_AR/image.png b/R_AR/image.png
new file mode 100644
index 0000000..bb60174
--- /dev/null
+++ b/R_AR/image.png
Binary files differ
diff --git a/R_AR/image3.png b/R_AR/image3.png
new file mode 100644
index 0000000..ca376b6
--- /dev/null
+++ b/R_AR/image3.png
Binary files differ
diff --git a/R_AR/main.r b/R_AR/main.r
index fd1f653..278c504 100755
--- a/R_AR/main.r
+++ b/R_AR/main.r
@@ -5,24 +5,78 @@ library(arules)
if (!require(arulesViz)) install.packages("arulesViz", repos = "https://cran.r-project.org/", Ncpus = 16) # nolint
library(arulesViz)
-
-
t <- read.transactions("AssociationRules.csv", format = "basket", sep = " ")
-image(t)
summary(t)
-# most freq
-names(sort(itemFrequency(t), decreasing=TRUE)[1])
-# max amount of items
+# a most freq
+names(sort(itemFrequency(t), decreasing = TRUE)[1])
+# b max amount of items
max(size(t))
# 0..0.01 support, 0 confidence
-rules0 <- apriori(t, parameter=list(supp=0.01, conf=0))
+rules0 <- apriori(t, parameter = list(supp = 0.01, conf = 0, minlen = 2))
+# c
length(rules0)
-rules05 <- apriori(t, parameter=list(supp=0.01, conf=0.5))
+
+# d
+rules05 <- apriori(t, parameter = list(supp = 0.01, conf = 0.5, minlen = 2))
length(rules05)
-plot(rules0, measure=c("support", "confidence"), shading="lift")
-plot(rules0, measure=c("support", "lift"))
\ No newline at end of file
+# f
+plot(rules05, measure = c("support", "lift"), shading = "confidence")
+# g
+plot(rules05, measure = c("support", "confidence"), shading = "lift")
+
+# j
+rules_supp_01 <- subset(rules05, support >= 0.1)
+rules_by_support <- sort(rules_supp_01, by = "support", decreasing = FALSE)
+inspect(rules_by_support)
+
+
+# k
+rules_conf_08 <- sort(subset(rules05, confidence > 0.8), by = "lift", decreasing = FALSE)
+inspect(rules_conf_08)
+
+plot(rules_conf_08,
+ method = "matrix",
+ engine = "grid",
+ measure = "confidence",
+ shading = "lift",
+  control = list(reorder = FALSE)
+)
+
+plot(rules_conf_08,
+ method = "matrix",
+ engine = "grid",
+ shading = c("lift", "confidence")
+)
+
+
+# n
+rules_lift_max_3 <- head(sort(rules05, by = "lift", decreasing = TRUE), 3)
+# o
+plot(rules_lift_max_3, method = "graph", engine = "igraph")
+inspect(rules_lift_max_3)
+
+
+# q
+train_t <- head(t, 8000)
+test_t <- tail(t, 2000)
+
+train <- apriori(train_t, parameter = list(supp = 0.01, conf = 0.5))
+subset_train <- subset(train, lift > 15)
+subset_train_df <- as(subset_train, "data.frame")
+
+# https://www.rdocumentation.org/packages/arules/versions/1.7-9/topics/interestMeasure
+test <- interestMeasure(subset_train, transactions = test_t, measure = c("support", "confidence", "lift", "count"), reuse = FALSE)
+
+for (i in 1:length(subset_train)) {
+ cat(subset_train_df$rules[i])
+ cat("\n")
+ cat("train conf:", subset_train_df$confidence[i], "lift:", subset_train_df$lift[i])
+ cat("\n")
+ cat("test conf:", test$confidence[i], "lift:", test$lift[i])
+ cat("\n")
+}
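The commit adds conviction.pdf but never computes conviction in main.r; a hedged sketch (not part of the commit) of how it could be derived for the same rule set, using conviction(x -> y) = (1 - supp(y)) / (1 - conf(x -> y)) and assuming the rules05 and t objects from the script above are in scope:

# recompute conviction on the transactions and attach it to the rule quality table
conv <- interestMeasure(rules05, measure = "conviction", transactions = t)
quality(rules05)$conviction <- conv
inspect(head(sort(rules05, by = "conviction", decreasing = TRUE), 5))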
diff --git a/R_Tree/.md b/R_Tree/.md
new file mode 100644
index 0000000..ce7b7a2
--- /dev/null
+++ b/R_Tree/.md
@@ -0,0 +1,3 @@
+complexity parameter cp
+
+ΔR = R_parent − (R_L + R_R)
\ No newline at end of file
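A minimal sketch (not part of the commit) of how cp interacts with this risk reduction in rpart: a split is kept only if it improves the relative error by at least cp, and the cptable reports that trade-off. It assumes the dt object fitted in R_Tree/main.r below; the cp value 0.05 is arbitrary:

dt$cptable            # CP, nsplit, rel error, xerror, xstd for each candidate tree size
plotcp(dt)            # cross-validated error as a function of cp
prune(dt, cp = 0.05)  # drops every split whose risk reduction falls below the threshold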
diff --git a/R_Tree/Rplots.pdf b/R_Tree/Rplots.pdf
new file mode 100644
index 0000000..bccd45f
--- /dev/null
+++ b/R_Tree/Rplots.pdf
Binary files differ
diff --git a/R_Tree/main.r b/R_Tree/main.r
new file mode 100755
index 0000000..ddac2fb
--- /dev/null
+++ b/R_Tree/main.r
@@ -0,0 +1,110 @@
+#!/usr/bin/env Rscript
+
+data <- read.csv("survey.csv")
+train <- head(data, 600)
+test <- tail(data, 150)
+
+if (!require(rpart)) install.packages("rpart", repos = "https://cran.r-project.org/", Ncpus = 16) # nolint
+library(rpart)
+library(rpart.plot)
+
+
+dt <- rpart(MYDEPV ~ Price + Income + Age,
+ data = train,
+ method = "class",
+ parms = list(split = "information"), # information gain splitting index
+ control = rpart.control(xval = 3) # three-fold cross-validation
+)
+
+printcp(dt)
+rpart.plot(dt, extra = 106)
+# summary(dt)
+
+
+tree_stats <- function(tree) {
+ frm <- tree$frame
+ internal_nodes <- sum(as.character(frm$var) != "<leaf>")
+
+ node_indexes <- as.integer(row.names(frm))
+ depth <- floor(log2(node_indexes)) + 1L
+
+ list(internal_nodes = as.integer(internal_nodes), height = as.integer(max(depth)))
+}
+
+stats <- tree_stats(dt)
+stats$internal_nodes
+stats$height
+
+
+pred <- predict(dt, train, type = "class")
+conf_matrix <- table(Predicted = pred, Actual = train$MYDEPV)
+conf_matrix
+
+misclass <- function(tt) {
+ # total_wrong / total_records
+ overall_misclass <- (sum(tt) - sum(diag(tt))) / sum(tt)
+ cat("Overall misclassification rate:", round(overall_misclass, 4), "\n")
+
+ classes <- rownames(tt)
+ misclass_per_class <- numeric(length(classes))
+ names(misclass_per_class) <- classes
+
+ for (cls in classes) {
+ correct <- tt[cls, cls]
+ total_in_class <- sum(tt[, cls])
+ misclass_per_class[cls] <- (total_in_class - correct) / total_in_class
+ }
+
+ cat("Misclassification rate per income class:\n")
+ print(round(misclass_per_class, 4))
+}
+
+misclass(conf_matrix)
+
+if (!require(ROCR)) install.packages("ROCR", repos = "https://cran.r-project.org/", Ncpus = 16) # nolint
+library(ROCR)
+
+rocr_pred <- prediction(predict(dt, type = "prob")[, 2], train$MYDEPV)
+roc <- performance(rocr_pred, "tpr", "fpr")
+auc <- performance(rocr_pred, "auc")
+
+plot(roc, col = "blue", main = "ROC")
+auc@y.values
+
+
+print("score with test data")
+pred <- predict(dt, test, type = "class")
+conf_matrix <- table(Predicted = pred, Actual = test$MYDEPV)
+conf_matrix
+misclass(conf_matrix)
+
+
+
+dt_gini <- rpart(MYDEPV ~ Price + Income + Age,
+ data = train,
+ method = "class",
+  parms = list(split = "gini"), # Gini splitting index
+ control = rpart.control(xval = 3) # three-fold cross-validation
+)
+
+printcp(dt_gini)
+rpart.plot(dt_gini, extra = 106)
+# summary(dt_gini)
+
+cp_table <- dt_gini$cptable
+optimal_cp <- cp_table[which.min(cp_table[, "xerror"]), "CP"]
+cat("Optimal CP value for pruning:", optimal_cp, "\n")
+dt_gini_pruned <- prune(dt_gini, cp = optimal_cp)
+
+printcp(dt_gini_pruned)
+rpart.plot(dt_gini_pruned, extra = 106)
+
+pred_gini <- predict(dt_gini_pruned, train, type = "class")
+conf_matrix_gini <- table(Predicted = pred_gini, Actual = train$MYDEPV)
+conf_matrix_gini
+
+misclass(conf_matrix_gini)
+
+stats <- tree_stats(dt_gini_pruned)
+stats$internal_nodes
+stats$height
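The script stops after scoring the pruned Gini tree on the training data; a hedged follow-up sketch (not part of the commit) scoring it on the held-out test split as well, mirroring the evaluation done above for the information-gain tree (all objects are from the script above):

pred_gini_test <- predict(dt_gini_pruned, test, type = "class")
conf_matrix_gini_test <- table(Predicted = pred_gini_test, Actual = test$MYDEPV)
conf_matrix_gini_test
misclass(conf_matrix_gini_test)

# test-set AUC for the pruned tree (ROCR is already loaded above)
rocr_pred_test <- prediction(predict(dt_gini_pruned, test, type = "prob")[, 2], test$MYDEPV)
performance(rocr_pred_test, "auc")@y.values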
diff --git a/R_Tree/survey.csv b/R_Tree/survey.csv
new file mode 100644
index 0000000..f39e287
--- /dev/null
+++ b/R_Tree/survey.csv
@@ -0,0 +1,751 @@
+MYDEPV,Price,Income,Age
+1,10,33,37
+0,20,21,55
+1,30,59,55
+1,20,76,44
+0,30,24,37
+0,20,22,32
+1,10,28,32
+1,10,49,38
+0,30,76,43
+1,20,59,55
+0,30,45,32
+0,30,21,46
+0,30,49,44
+0,10,23,30
+1,10,55,55
+0,20,29,32
+1,10,49,44
+0,20,45,32
+0,20,24,37
+0,10,30,32
+0,10,24,55
+1,10,59,55
+0,30,31,32
+0,20,33,32
+0,30,22,32
+0,30,29,32
+0,10,30,32
+0,20,28,32
+1,30,59,55
+0,30,56,43
+1,30,77,43
+1,20,97,18
+0,20,23,32
+0,30,25,37
+0,30,23,32
+0,30,88,43
+0,30,49,44
+1,30,76,44
+1,20,67,25
+1,10,55,55
+0,20,26,37
+1,20,49,44
+1,20,68,25
+0,30,45,32
+1,20,68,43
+0,20,32,35
+1,30,22,55
+1,30,55,55
+1,20,66,43
+0,20,29,32
+1,10,49,44
+1,10,28,32
+1,10,23,37
+0,20,45,32
+0,30,22,37
+1,10,66,25
+0,20,30,32
+0,20,43,27
+0,20,34,55
+0,30,32,32
+1,10,67,25
+0,20,25,27
+1,20,49,38
+0,30,33,55
+0,20,30,32
+1,10,34,37
+0,30,33,32
+0,10,32,27
+0,20,30,32
+1,20,66,25
+0,30,29,32
+1,10,25,37
+1,20,55,55
+0,30,22,32
+1,10,28,38
+0,20,22,44
+0,30,28,32
+0,10,45,32
+1,20,65,22
+1,10,78,21
+1,30,66,25
+1,20,99,25
+0,10,21,44
+0,20,23,37
+0,30,22,37
+1,30,88,43
+0,30,28,32
+1,30,49,55
+1,10,55,55
+0,20,29,32
+0,30,87,43
+1,30,66,25
+1,20,77,22
+1,10,26,37
+0,30,45,32
+0,20,43,22
+1,30,64,33
+0,20,45,32
+0,10,30,32
+0,30,56,43
+0,20,30,32
+0,30,30,32
+1,10,78,25
+1,20,77,43
+1,20,49,38
+0,30,32,35
+0,10,29,32
+1,20,89,22
+0,30,30,32
+1,30,55,55
+0,20,22,32
+0,20,32,32
+0,30,30,32
+0,30,49,44
+1,10,77,43
+1,20,59,55
+0,20,30,32
+0,30,22,27
+1,20,68,25
+1,10,59,55
+1,30,17,23
+0,20,22,32
+1,10,44,43
+1,20,76,21
+0,20,29,32
+1,10,59,55
+0,20,29,32
+0,30,23,30
+1,20,49,44
+0,20,33,32
+0,20,23,32
+1,10,64,33
+1,10,49,44
+1,30,57,25
+1,10,28,32
+0,10,22,32
+0,30,22,44
+0,20,33,23
+0,30,46,43
+0,30,22,32
+1,20,59,55
+0,10,22,32
+1,20,59,55
+1,10,33,24
+0,10,55,44
+0,30,49,38
+1,30,77,25
+0,20,22,37
+1,30,55,55
+1,30,22,25
+1,10,44,37
+0,30,21,37
+1,20,49,44
+1,20,55,55
+0,30,33,32
+0,10,30,32
+0,10,29,32
+0,30,49,38
+1,10,21,37
+1,10,55,25
+0,30,22,32
+0,20,28,32
+0,10,25,27
+1,20,98,43
+1,20,43,37
+0,30,49,38
+1,20,76,43
+0,10,30,32
+0,30,32,27
+1,10,59,55
+0,20,21,27
+0,30,55,44
+1,20,77,24
+0,30,34,37
+1,10,59,55
+1,10,65,25
+1,10,78,65
+0,20,19,46
+1,10,65,22
+1,20,59,55
+1,30,55,27
+0,20,29,32
+1,20,49,38
+0,20,23,38
+1,20,34,37
+0,30,30,32
+1,30,59,55
+1,10,22,25
+1,10,55,23
+0,30,29,32
+0,20,22,32
+0,20,33,27
+1,10,56,43
+1,10,49,44
+1,10,68,25
+0,10,22,32
+0,20,33,32
+0,20,22,27
+0,30,28,32
+0,20,45,32
+0,30,28,38
+0,30,24,27
+0,20,30,32
+0,20,29,32
+0,10,24,32
+1,10,28,32
+1,10,55,55
+0,20,20,47
+0,20,30,32
+0,30,28,32
+0,10,30,32
+0,20,22,37
+0,30,20,47
+0,30,45,32
+0,10,30,32
+0,20,22,27
+1,10,33,25
+0,10,30,32
+0,10,21,55
+0,10,45,32
+1,30,68,25
+0,10,30,32
+1,30,65,22
+0,30,49,44
+1,30,44,25
+0,20,28,32
+1,10,49,32
+1,10,66,43
+0,30,45,32
+1,10,65,25
+1,20,55,23
+1,30,78,21
+1,10,66,22
+0,20,25,37
+0,10,43,22
+1,10,66,43
+0,30,21,55
+0,20,23,27
+0,30,29,32
+1,20,56,43
+0,30,24,27
+1,10,55,44
+1,20,59,55
+0,10,34,25
+0,20,34,23
+1,20,66,25
+0,30,34,25
+0,20,32,32
+0,10,33,27
+1,10,88,23
+0,30,29,32
+0,30,22,27
+1,20,17,23
+1,10,54,25
+1,20,77,25
+1,10,59,55
+0,10,33,32
+0,20,32,37
+0,10,22,37
+1,20,55,37
+1,30,59,55
+0,10,29,32
+1,10,32,32
+1,20,28,38
+1,10,66,25
+0,10,45,32
+1,20,55,55
+0,10,19,46
+0,30,21,44
+1,20,49,44
+0,10,33,32
+0,20,30,32
+1,30,89,22
+0,30,30,32
+0,20,34,25
+1,30,55,55
+0,30,30,32
+0,20,55,44
+0,20,30,32
+1,30,59,55
+0,30,34,55
+0,10,33,23
+0,10,30,32
+0,10,45,32
+0,10,29,32
+0,30,78,43
+0,30,30,32
+0,30,22,37
+1,20,49,44
+1,20,49,38
+1,30,33,24
+1,20,57,25
+1,10,17,23
+1,10,55,55
+1,10,76,21
+0,10,29,32
+0,10,23,32
+0,20,33,55
+0,30,29,32
+0,20,29,32
+0,10,29,32
+0,20,28,32
+1,20,55,25
+0,10,22,32
+0,30,32,32
+1,20,78,43
+1,10,87,43
+1,10,49,44
+0,10,43,27
+0,30,26,37
+0,20,29,32
+0,20,30,32
+0,10,22,66
+1,30,66,25
+0,10,30,32
+1,10,88,43
+0,20,33,34
+1,10,99,25
+1,20,55,55
+1,10,34,33
+0,30,28,32
+0,10,29,32
+0,30,29,32
+0,30,49,38
+0,30,33,44
+0,10,33,32
+1,10,59,55
+1,20,55,44
+1,20,66,43
+1,10,67,25
+0,10,30,32
+0,30,21,37
+0,30,30,32
+0,20,22,37
+0,20,30,32
+0,30,45,32
+0,30,28,38
+1,20,65,25
+0,30,30,32
+1,10,76,44
+1,10,49,44
+1,30,34,33
+0,30,22,27
+0,10,33,44
+0,10,30,32
+1,30,55,44
+1,20,77,25
+0,10,21,27
+1,10,76,43
+0,20,22,45
+0,30,29,32
+0,20,21,37
+0,30,33,37
+0,20,43,24
+1,10,59,55
+1,10,55,37
+1,20,49,44
+1,30,88,23
+0,20,25,37
+0,30,55,37
+0,10,34,55
+0,20,28,32
+0,30,30,32
+0,30,28,32
+0,10,45,32
+1,10,59,55
+0,30,29,32
+0,20,24,32
+0,10,30,32
+1,10,77,25
+1,20,87,43
+0,10,23,38
+1,10,28,38
+1,30,98,43
+0,20,33,32
+1,10,59,55
+0,20,45,32
+1,20,67,25
+0,10,21,46
+0,20,22,32
+1,20,28,38
+1,10,21,37
+1,30,44,22
+0,10,33,27
+0,20,28,32
+0,30,29,32
+1,20,78,65
+0,20,24,27
+0,30,67,43
+1,30,97,18
+0,30,28,32
+0,10,30,32
+0,30,24,55
+0,20,33,37
+0,30,33,32
+1,20,33,24
+1,30,55,25
+0,10,33,34
+1,10,55,55
+0,10,24,37
+0,10,30,32
+0,10,22,37
+0,30,22,45
+0,30,30,32
+1,30,55,55
+1,30,66,25
+0,30,29,32
+1,10,22,55
+0,30,29,32
+0,10,30,32
+0,30,30,32
+0,30,33,23
+0,20,31,32
+1,30,55,55
+0,30,29,32
+0,10,32,35
+0,30,33,32
+0,30,30,32
+1,10,49,44
+0,30,23,38
+1,20,64,33
+1,20,78,43
+1,10,67,43
+1,30,78,65
+0,20,33,23
+0,30,49,44
+1,10,28,32
+0,20,28,32
+0,20,30,32
+0,20,30,32
+0,30,22,37
+1,10,49,44
+1,10,88,43
+1,10,32,32
+1,20,33,43
+1,20,56,26
+0,30,44,37
+0,20,32,27
+0,10,22,37
+0,20,33,27
+1,10,98,43
+0,20,21,37
+0,30,30,32
+0,30,31,32
+0,30,33,23
+0,30,30,32
+0,20,29,32
+0,30,29,32
+1,20,49,38
+1,30,59,55
+1,30,59,55
+0,30,43,27
+0,30,21,54
+0,10,22,44
+1,10,56,26
+0,30,30,32
+0,10,22,27
+1,10,68,25
+1,10,66,25
+1,30,77,25
+0,20,28,32
+0,30,49,44
+0,10,33,32
+1,30,33,25
+1,10,28,32
+0,20,22,27
+0,20,33,32
+0,20,30,32
+1,30,33,43
+1,10,33,43
+1,20,59,55
+0,10,34,23
+0,30,49,44
+1,10,77,22
+1,20,49,44
+1,10,56,43
+1,20,65,25
+0,10,23,27
+1,10,78,43
+1,30,55,55
+0,20,22,66
+1,10,59,55
+0,10,25,37
+1,10,59,55
+0,30,33,32
+0,10,45,32
+0,20,22,32
+0,20,28,32
+0,10,21,54
+1,10,44,22
+0,30,43,37
+0,20,45,32
+0,20,25,27
+1,20,49,44
+0,30,43,22
+0,10,33,55
+1,10,55,23
+0,10,22,32
+0,30,29,32
+0,10,30,32
+1,20,22,55
+0,20,33,44
+1,30,55,55
+0,10,29,32
+1,30,65,25
+1,30,99,25
+1,30,66,43
+0,10,22,32
+0,20,29,32
+1,30,67,25
+1,20,66,25
+0,20,22,27
+0,30,30,32
+0,30,22,32
+0,20,29,32
+1,10,49,38
+0,30,24,32
+0,20,21,54
+0,10,29,32
+0,30,23,27
+1,10,28,32
+0,30,49,44
+1,30,49,32
+0,20,29,32
+1,20,66,25
+0,30,33,34
+0,20,29,32
+1,10,46,43
+0,10,30,32
+1,30,65,25
+1,20,44,25
+1,10,59,55
+0,10,24,27
+0,10,22,27
+0,20,22,37
+1,10,77,25
+0,20,30,32
+0,10,33,32
+1,10,55,25
+1,30,56,26
+0,30,68,43
+1,20,55,55
+1,30,77,24
+0,30,49,44
+1,30,59,55
+0,10,29,32
+0,30,49,38
+1,20,49,44
+1,20,59,55
+0,30,49,44
+1,20,55,27
+0,30,19,46
+0,10,29,32
+0,10,29,32
+1,30,66,43
+1,20,55,55
+1,10,55,55
+1,10,28,32
+0,30,22,32
+1,20,59,55
+0,30,45,32
+1,10,49,38
+1,20,28,38
+0,10,33,32
+1,10,22,37
+0,30,33,27
+0,30,33,32
+1,10,28,38
+1,20,59,55
+0,30,33,37
+1,20,33,25
+1,10,55,55
+0,30,78,43
+1,20,67,43
+1,20,49,32
+0,10,30,32
+1,10,66,25
+1,10,49,38
+1,20,56,43
+1,20,78,25
+1,30,55,55
+0,30,22,37
+1,30,59,55
+1,20,55,55
+0,30,23,37
+1,10,49,44
+1,10,49,38
+0,10,43,24
+0,10,23,32
+1,20,49,44
+0,30,31,32
+0,30,33,27
+0,20,22,37
+1,30,77,22
+1,30,59,55
+0,20,22,32
+1,30,55,23
+0,30,30,32
+1,10,49,44
+0,20,22,32
+1,10,68,43
+1,10,49,55
+1,20,49,55
+0,20,29,32
+0,10,24,27
+1,20,36,37
+0,30,32,37
+1,10,57,25
+0,10,21,37
+1,20,59,55
+0,30,22,32
+0,10,31,32
+0,30,29,32
+1,20,55,25
+1,10,89,22
+0,30,22,66
+0,10,22,27
+0,30,34,23
+0,20,29,32
+1,20,34,33
+0,10,45,32
+0,10,20,47
+1,10,33,37
+0,20,30,32
+0,10,45,32
+0,30,28,38
+0,20,21,37
+1,30,76,21
+0,30,29,32
+0,30,49,38
+0,20,55,36
+1,10,55,27
+0,10,29,32
+0,20,24,27
+1,10,28,32
+0,30,22,27
+0,30,29,32
+1,10,97,18
+1,30,67,25
+1,30,55,55
+0,30,25,37
+1,10,22,37
+0,30,28,32
+0,20,33,32
+0,30,49,44
+1,20,22,25
+1,10,77,24
+0,10,29,32
+0,30,55,36
+0,10,32,37
+1,20,59,55
+0,20,29,32
+1,10,28,38
+1,20,88,43
+0,20,29,32
+0,20,23,30
+1,30,55,25
+1,20,88,43
+1,10,49,44
+1,30,54,25
+1,20,55,55
+0,30,28,32
+1,20,88,23
+0,20,44,37
+0,20,21,46
+1,10,49,38
+1,20,55,23
+0,10,29,32
+1,10,44,25
+0,20,31,32
+0,30,29,32
+0,30,33,24
+0,10,33,23
+0,10,31,32
+1,30,59,55
+0,10,22,27
+0,10,22,32
+1,20,55,55
+1,10,43,37
+0,30,22,32
+0,10,25,27
+0,20,31,32
+0,20,29,32
+1,20,44,43
+0,20,45,32
+0,10,29,32
+1,30,55,23
+0,20,30,32
+0,30,30,32
+1,10,49,44
+0,20,30,32
+0,30,25,27
+0,10,29,32
+0,20,33,24
+1,20,55,55
+0,30,44,43
+0,10,29,32
+1,10,36,37
+0,30,21,27
+1,20,66,43
+0,30,49,44
+0,30,36,37
+0,30,30,32
+1,20,88,23
+1,20,49,38
+0,30,45,32
+1,20,46,43
+0,20,21,44
+1,20,66,22
+0,30,23,32
+1,20,59,55
+0,10,22,45
+0,20,30,32
+0,10,33,24
+0,10,29,32
+0,30,29,32
+0,10,31,32
+1,10,78,43
+0,20,33,37
+1,20,78,21
+1,10,88,23
+1,20,59,55
+1,30,59,55
+0,30,43,24
+1,30,78,25
+1,30,88,23
+1,30,66,22
+1,20,54,25
+0,20,45,32
+1,20,49,44
+0,20,24,55
+1,10,66,43
+1,20,44,22
+1,10,55,55
+1,30,59,55
+0,20,30,32
+0,10,22,32
+1,20,49,44
+0,30,66,43
+1,30,68,25
+1,30,59,55
+0,20,28,38
+1,10,59,55
+0,20,29,32
+1,10,55,55
+0,30,25,27
+0,10,29,32
+0,10,55,36
+0,30,21,37
+0,30,28,38
\ No newline at end of file