diff --git a/_meta/beat.yml b/_meta/beat.yml
index 2619358..ad8881d 100644
--- a/_meta/beat.yml
+++ b/_meta/beat.yml
@@ -5,4 +5,4 @@
youtubebeat:
# Defines how often an event is sent to the output
period: 1s
- start_url: "https://www.youtube.com/watch?v=SmBCZgcGlKk"
+ start_id: "SmBCZgcGlKk"
diff --git a/_meta/fields.yml b/_meta/fields.yml
index 56276e3..abd0c5a 100644
--- a/_meta/fields.yml
+++ b/_meta/fields.yml
@@ -2,8 +2,23 @@
title: youtubebeat
description:
fields:
- - name: counter
+ - name: url
+ type: string
+ required: true
+ description: >
+ Video's full URL
+ - name: title
+ type: string
+ required: true
+ description: >
+ Video title
+ - name: views
type: long
required: true
description: >
- PLEASE UPDATE DOCUMENTATION
+ Video's view count
+ - name: date
+ type: string
+ required: true
+ description: >
+ Video's publish date
diff --git a/_meta/kibana.generated/5/index-pattern/youtubebeat.json b/_meta/kibana.generated/5/index-pattern/youtubebeat.json
index 48051e3..bf532db 100644
--- a/_meta/kibana.generated/5/index-pattern/youtubebeat.json
+++ b/_meta/kibana.generated/5/index-pattern/youtubebeat.json
@@ -1,6 +1,6 @@
{
"fieldFormatMap": "{\"@timestamp\":{\"id\":\"date\"}}",
- "fields": "[{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"counter\",\"scripted\":false,\"searchable\":true,\"type\":\"number\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"beat.name\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"beat.hostname\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"beat.timezone\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"beat.version\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"@timestamp\",\"scripted\":false,\"searchable\":true,\"type\":\"date\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"tags\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"fields\",\"scripted\":false,\"searchable\":true},{\"aggregatable\":false,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"error.message\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"error.code\",\"scripted\":false,\"searchable\":true,\"type\":\"number\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"error.type\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\"
:true,\"name\":\"meta.cloud.provider\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"meta.cloud.instance_id\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"meta.cloud.instance_name\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"meta.cloud.machine_type\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"meta.cloud.availability_zone\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"meta.cloud.project_id\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"meta.cloud.region\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"docker.container.id\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"docker.container.image\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"docker.container.name\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"docker.container.labels\",\"scripted\":false,\"searchable\":true},{\"aggregatable\":true,\"analyzed\":false,\"count\"
:0,\"doc_values\":true,\"indexed\":true,\"name\":\"host.name\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"host.id\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"host.architecture\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"host.os.platform\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"host.os.version\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"host.os.family\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"host.ip\",\"scripted\":false,\"searchable\":true},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"host.mac\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"kubernetes.pod.name\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"kubernetes.pod.uid\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"kubernetes.namespace\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"kube
rnetes.node.name\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"kubernetes.labels\",\"scripted\":false,\"searchable\":true},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"kubernetes.annotations\",\"scripted\":false,\"searchable\":true},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"kubernetes.container.name\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"kubernetes.container.image\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":false,\"analyzed\":false,\"count\":0,\"doc_values\":false,\"indexed\":false,\"name\":\"_id\",\"scripted\":false,\"searchable\":false,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":false,\"indexed\":false,\"name\":\"_type\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":false,\"analyzed\":false,\"count\":0,\"doc_values\":false,\"indexed\":false,\"name\":\"_index\",\"scripted\":false,\"searchable\":false,\"type\":\"string\"},{\"aggregatable\":false,\"analyzed\":false,\"count\":0,\"doc_values\":false,\"indexed\":false,\"name\":\"_score\",\"scripted\":false,\"searchable\":false,\"type\":\"number\"}]",
+ "fields": "[{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"url\",\"scripted\":false,\"searchable\":true},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"title\",\"scripted\":false,\"searchable\":true},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"views\",\"scripted\":false,\"searchable\":true,\"type\":\"number\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"date\",\"scripted\":false,\"searchable\":true},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"beat.name\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"beat.hostname\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"beat.timezone\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"beat.version\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"@timestamp\",\"scripted\":false,\"searchable\":true,\"type\":\"date\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"tags\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"fields\",\"scripted\":false,\"searchable\":true},{\"aggregatable\":false,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"error.message\",\"scripted\":false,\"searchable\":true,\"type\":\"
string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"error.code\",\"scripted\":false,\"searchable\":true,\"type\":\"number\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"error.type\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"meta.cloud.provider\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"meta.cloud.instance_id\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"meta.cloud.instance_name\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"meta.cloud.machine_type\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"meta.cloud.availability_zone\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"meta.cloud.project_id\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"meta.cloud.region\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"docker.container.id\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"docker.container.image\",\"scripted\":false,\
"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"docker.container.name\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"docker.container.labels\",\"scripted\":false,\"searchable\":true},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"host.name\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"host.id\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"host.architecture\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"host.os.platform\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"host.os.version\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"host.os.family\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"host.ip\",\"scripted\":false,\"searchable\":true},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"host.mac\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"kubernetes.pod.name\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"anal
yzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"kubernetes.pod.uid\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"kubernetes.namespace\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"kubernetes.node.name\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"kubernetes.labels\",\"scripted\":false,\"searchable\":true},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"kubernetes.annotations\",\"scripted\":false,\"searchable\":true},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"kubernetes.container.name\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"kubernetes.container.image\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":false,\"analyzed\":false,\"count\":0,\"doc_values\":false,\"indexed\":false,\"name\":\"_id\",\"scripted\":false,\"searchable\":false,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":false,\"indexed\":false,\"name\":\"_type\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":false,\"analyzed\":false,\"count\":0,\"doc_values\":false,\"indexed\":false,\"name\":\"_index\",\"scripted\":false,\"searchable\":false,\"type\":\"string\"},{\"aggregatable\":false,\"analyzed\":false,\"count\":0,\"doc_values\":false,\"indexed\":false,\"name\":\"_score\",\"scripted\":false,\"searchable\":false,\"type\":\"number\"}]",
"timeFieldName": "@timestamp",
"title": "youtubebeat-*"
}
\ No newline at end of file
diff --git a/_meta/kibana.generated/6/index-pattern/youtubebeat.json b/_meta/kibana.generated/6/index-pattern/youtubebeat.json
index d7c8600..084b923 100644
--- a/_meta/kibana.generated/6/index-pattern/youtubebeat.json
+++ b/_meta/kibana.generated/6/index-pattern/youtubebeat.json
@@ -3,7 +3,7 @@
{
"attributes": {
"fieldFormatMap": "{\"@timestamp\":{\"id\":\"date\"}}",
- "fields": "[{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"counter\",\"scripted\":false,\"searchable\":true,\"type\":\"number\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"beat.name\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"beat.hostname\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"beat.timezone\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"beat.version\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"@timestamp\",\"scripted\":false,\"searchable\":true,\"type\":\"date\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"tags\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"fields\",\"scripted\":false,\"searchable\":true},{\"aggregatable\":false,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"error.message\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"error.code\",\"scripted\":false,\"searchable\":true,\"type\":\"number\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"error.type\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\"
:true,\"name\":\"meta.cloud.provider\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"meta.cloud.instance_id\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"meta.cloud.instance_name\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"meta.cloud.machine_type\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"meta.cloud.availability_zone\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"meta.cloud.project_id\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"meta.cloud.region\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"docker.container.id\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"docker.container.image\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"docker.container.name\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"docker.container.labels\",\"scripted\":false,\"searchable\":true},{\"aggregatable\":true,\"analyzed\":false,\"count\"
:0,\"doc_values\":true,\"indexed\":true,\"name\":\"host.name\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"host.id\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"host.architecture\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"host.os.platform\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"host.os.version\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"host.os.family\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"host.ip\",\"scripted\":false,\"searchable\":true},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"host.mac\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"kubernetes.pod.name\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"kubernetes.pod.uid\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"kubernetes.namespace\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"kube
rnetes.node.name\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"kubernetes.labels\",\"scripted\":false,\"searchable\":true},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"kubernetes.annotations\",\"scripted\":false,\"searchable\":true},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"kubernetes.container.name\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"kubernetes.container.image\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":false,\"analyzed\":false,\"count\":0,\"doc_values\":false,\"indexed\":false,\"name\":\"_id\",\"scripted\":false,\"searchable\":false,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":false,\"indexed\":false,\"name\":\"_type\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":false,\"analyzed\":false,\"count\":0,\"doc_values\":false,\"indexed\":false,\"name\":\"_index\",\"scripted\":false,\"searchable\":false,\"type\":\"string\"},{\"aggregatable\":false,\"analyzed\":false,\"count\":0,\"doc_values\":false,\"indexed\":false,\"name\":\"_score\",\"scripted\":false,\"searchable\":false,\"type\":\"number\"}]",
+ "fields": "[{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"url\",\"scripted\":false,\"searchable\":true},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"title\",\"scripted\":false,\"searchable\":true},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"views\",\"scripted\":false,\"searchable\":true,\"type\":\"number\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"date\",\"scripted\":false,\"searchable\":true},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"beat.name\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"beat.hostname\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"beat.timezone\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"beat.version\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"@timestamp\",\"scripted\":false,\"searchable\":true,\"type\":\"date\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"tags\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"fields\",\"scripted\":false,\"searchable\":true},{\"aggregatable\":false,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"error.message\",\"scripted\":false,\"searchable\":true,\"type\":\"
string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"error.code\",\"scripted\":false,\"searchable\":true,\"type\":\"number\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"error.type\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"meta.cloud.provider\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"meta.cloud.instance_id\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"meta.cloud.instance_name\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"meta.cloud.machine_type\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"meta.cloud.availability_zone\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"meta.cloud.project_id\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"meta.cloud.region\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"docker.container.id\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"docker.container.image\",\"scripted\":false,\
"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"docker.container.name\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"docker.container.labels\",\"scripted\":false,\"searchable\":true},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"host.name\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"host.id\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"host.architecture\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"host.os.platform\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"host.os.version\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"host.os.family\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"host.ip\",\"scripted\":false,\"searchable\":true},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"host.mac\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"kubernetes.pod.name\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"anal
yzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"kubernetes.pod.uid\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"kubernetes.namespace\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"kubernetes.node.name\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"kubernetes.labels\",\"scripted\":false,\"searchable\":true},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"kubernetes.annotations\",\"scripted\":false,\"searchable\":true},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"kubernetes.container.name\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":true,\"indexed\":true,\"name\":\"kubernetes.container.image\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":false,\"analyzed\":false,\"count\":0,\"doc_values\":false,\"indexed\":false,\"name\":\"_id\",\"scripted\":false,\"searchable\":false,\"type\":\"string\"},{\"aggregatable\":true,\"analyzed\":false,\"count\":0,\"doc_values\":false,\"indexed\":false,\"name\":\"_type\",\"scripted\":false,\"searchable\":true,\"type\":\"string\"},{\"aggregatable\":false,\"analyzed\":false,\"count\":0,\"doc_values\":false,\"indexed\":false,\"name\":\"_index\",\"scripted\":false,\"searchable\":false,\"type\":\"string\"},{\"aggregatable\":false,\"analyzed\":false,\"count\":0,\"doc_values\":false,\"indexed\":false,\"name\":\"_score\",\"scripted\":false,\"searchable\":false,\"type\":\"number\"}]",
"timeFieldName": "@timestamp",
"title": "youtubebeat-*"
},
diff --git a/beater/youtubebeat.go b/beater/youtubebeat.go
index 307fb15..bdade93 100644
--- a/beater/youtubebeat.go
+++ b/beater/youtubebeat.go
@@ -2,6 +2,9 @@ package beater
import (
"fmt"
+ "github.com/gocolly/colly"
+ "strconv"
+ "strings"
"time"
"github.com/elastic/beats/libbeat/beat"
@@ -32,6 +35,54 @@ func New(b *beat.Beat, cfg *common.Config) (beat.Beater, error) {
return bt, nil
}
+func scrapeVideos(startId string, fieldsToSend chan common.MapStr) {
+ const BaseUrl = "https://www.youtube.com"
+ const BaseSuffix = "/watch?v="
+
+ videoCollector := colly.NewCollector(
+ colly.AllowedDomains("youtube.com", "www.youtube.com"),
+ colly.Async(true),
+ colly.MaxDepth(10),
+ )
+
+ videoCollector.OnHTML("body", func(e *colly.HTMLElement) {
+ url := e.Request.URL.String()
+ isPaid := e.ChildAttr("meta[itemprop=\"paid\"]", "content")
+ if isPaid == "True" {
+ logp.Err("Not parsing video because of isPaid " + url)
+ return
+ }
+ title := e.ChildAttr("meta[itemprop=\"name\"]", "content")
+ if title == "YouTube" {
+ logp.Err("Not parsing video because of title " + url)
+ return
+ }
+ views, err := strconv.ParseInt(e.ChildAttr("meta[itemprop=\"interactionCount\"]", "content"), 10, 64)
+ if err != nil {
+ logp.Err("Can't parse view count for URL " + url)
+ return
+ }
+ date := e.ChildAttr("meta[itemprop=\"datePublished\"]", "content")
+ fields := common.MapStr{
+ "url": url,
+ "title": title,
+ "views": views,
+ "date": date,
+ }
+ fieldsToSend <- fields
+ })
+
+ videoCollector.OnHTML("a[href]", func(e *colly.HTMLElement) {
+ href := e.Attr("href")
+ if strings.HasPrefix(href, BaseSuffix) {
+ e.Request.Visit(BaseUrl + href)
+ }
+ })
+
+ videoCollector.Visit(BaseUrl + BaseSuffix + startId)
+ videoCollector.Wait()
+}
+
// Run starts youtubebeat.
func (bt *Youtubebeat) Run(b *beat.Beat) error {
logp.Info("youtubebeat is running! Hit CTRL-C to stop it.")
@@ -42,25 +93,24 @@ func (bt *Youtubebeat) Run(b *beat.Beat) error {
return err
}
+ fieldsToSend := make(chan common.MapStr)
+ go scrapeVideos(bt.config.StartId, fieldsToSend)
+
ticker := time.NewTicker(bt.config.Period)
- counter := 1
for {
select {
case <-bt.done:
return nil
case <-ticker.C:
}
-
+ fields := <-fieldsToSend
+ fields["type"] = b.Info.Name
event := beat.Event{
Timestamp: time.Now(),
- Fields: common.MapStr{
- "type": b.Info.Name,
- "counter": counter,
- },
+ Fields: fields,
}
bt.client.Publish(event)
logp.Info("Event sent")
- counter++
}
}
diff --git a/config/config.go b/config/config.go
index 41f9583..7d0f392 100644
--- a/config/config.go
+++ b/config/config.go
@@ -6,11 +6,11 @@ package config
import "time"
type Config struct {
- Period time.Duration `config:"period"`
- StartUrl string `config:"start_url"`
+ Period time.Duration `config:"period"`
+ StartId string `config:"start_id"`
}
var DefaultConfig = Config{
- Period: 1 * time.Second,
- StartUrl: "https://www.youtube.com/watch?v=SmBCZgcGlKk",
+ Period: 1 * time.Second,
+ StartId: "SmBCZgcGlKk",
}
diff --git a/data/meta.json b/data/meta.json
new file mode 100644
index 0000000..00f74d2
--- /dev/null
+++ b/data/meta.json
@@ -0,0 +1 @@
+{"uuid":"a5fddc65-b03a-4cde-b7a9-f884d277841d"}
diff --git a/docs/fields.asciidoc b/docs/fields.asciidoc
index 2128a8e..3186438 100644
--- a/docs/fields.asciidoc
+++ b/docs/fields.asciidoc
@@ -431,14 +431,50 @@ Kubernetes container image
None
-*`counter`*::
+*`url`*::
++
+--
+type: string
+
+required: True
+
+Video's full URL
+
+
+--
+
+*`title`*::
++
+--
+type: string
+
+required: True
+
+Video title
+
+
+--
+
+*`views`*::
+
--
type: long
required: True
-PLEASE UPDATE DOCUMENTATION
+Video's view count
+
+
+--
+
+*`date`*::
++
+--
+type: string
+
+required: True
+
+Video's publish date
--
diff --git a/fields.yml b/fields.yml
index 219eb87..2943860 100644
--- a/fields.yml
+++ b/fields.yml
@@ -2,11 +2,26 @@
title: youtubebeat
description:
fields:
- - name: counter
+ - name: url
+ type: string
+ required: true
+ description: >
+ Video's full URL
+ - name: title
+ type: string
+ required: true
+ description: >
+ Video title
+ - name: views
type: long
required: true
description: >
- PLEASE UPDATE DOCUMENTATION
+ Video's view count
+ - name: date
+ type: string
+ required: true
+ description: >
+ Video's publish date
- key: beat
title: Beat
diff --git a/include/fields.go b/include/fields.go
index e89f25b..541fdd1 100644
--- a/include/fields.go
+++ b/include/fields.go
@@ -31,5 +31,5 @@ func init() {
// Asset returns asset data
func Asset() string {
- return "eJy0WFtz2zYTffev2MnT983InMZuPB09dOrankbTOPE09rOyAlYkahJgcLGs/voOQPBmUpQcqy8ZE5dzzgK7Z6GcwiNt57BVzroVrQjtCYAVNqeXg5wM06K0Qsn5CcBaUM6N/wvgFCQWNAemnLSkwxiA3ZY0h1zJNA5o+u6EJj4Hqx3FwS4s/BoHAe4+3Vx+vYGHu+vL+xu4/nL1cHvz+f7yfvHl88lJFN1X+/tQZsS7UtKikAaYKgolw76oH/AJRY6rnEBIwDwHeiJpg3aTdMLsxekBEv/ndAz3GYUNoNZgMwoKwZDkQqZhIFcpFGQMpmQSWHRWhW3CNFCGrBfo55mSa5E6jZ4O1iKnmR/3k2jhCXPnd4IzxAOmsP5TKtsFC1sgU8ZGprj+XgWqno6ZnwtD3/zntwZHhYh360qGh1Yz7j+4Rhsa0GSdlsRhtQ1UqiRPI1MwW2OpACVhkwmWtcI7Z6edlEKmI2qsKOgfJQ9QU6/8L9U8kTZCyf1i4sI6rUI6h8tPSXopxMFmwlSpnPRT991vPhRjsSjf9eqUo6WpOl0rXaDtraNnLEpfepcudcbC2YXN4Oyn9xczeH82P/8w/3CenJ+fHXa6QRJsqkSmWIa+QDQxpTls0LTxvQjKYmqmWS71SliNehvWVqfF0FtByPeSdHVRKHn4sBqlQWbb+6jO6QVx5Q69c1Srv4nVtVZ9LKuZR9pulObTQhuvcoZ0W1PeoCqyFwpIa9U33FQrV06T3PhNtQOyitHnL3Iu/FrMQci18pXN0AT/CjwmqZOhY/5dNdHMmvFak6Vn2xncIauVFnGSAQFTfIjeaTEHoXuQIbTHGkD37+wg9CpNYotiuXK87VFX/hNKrZ4EJx+mRY4Wx9vWbZyFtVZFhdRsNf6uWgtCzpdhwbKG9CsZGaP0zi7mlyZhV1LDvixsYnuq93OnvfUVJnCnjBE+cUNPMoCaPOAMUkYzUBq4SIXFXDFCmezUJqSxKBktxZ7SWcSFsLiuJfkmAgWyTMiXpTvGsL8zNRzdvn4YS1yw7ORZc872LCmIC1dMs99WECHFXkcenzkiF3a77LS8RoEzp4TGnr5ne4y0AwShI4q22wlTyRGmbXMTKRe8sbnVRkqcOX0+PPXiFq/lD6XSnKpK282uKd3bav8Ka/bFFwudK/YY6idW+nX9PQJezYGxaL395jkx37NDmVdzvmZNprRdVh1gDmvMjb80lCxTuuY7bap8x4u8kQWj/WGXj8eeQDoR/G2e+CDFd0ctIAg+5uoNXTHWPl7F2M2LAFe/TqMA/5BYOZFbUHJKSscMflDJVcPpsaa4clxRbgZsvbcETL8n9mhZhJOoeJqk9cncpuzH6msEZOEfA51E9V1uYD1tbvrxvZkZuV+Xl2+/k4/xZ8XwNo6U6ZVBjCQ5apYJS8w6fYQYenDwP0rSBJ5/uVhe/DwD1MUMypLNoBCl+f9QijJJmaP1T/q3KfnyFWqgqIGRtMrMwK2ctG4GGyG52uwQ0f/F8+MaIs4oxxoLkW/fTFHBxCA18QztDDitBMoZrDXRyvCpaEU5kNAbmmD/JIz1hra4O0XONRlDZkhQIHtbkDVNhppvUFNLNgNnHOb5Fm4vr7oaah95dCvSkiyZ1k3+7I6N0LbzzTO4/6ZtQaHrJdNtsd2014B6ouFVNlQqfoT20DmBUvHK20ap3FutqcN0pzg8LK6HRP5fUyI7XlAt4pDM/wI76gl6xB1HeGhzPYyoQoMCyyETSqls+P+vo9F1IMc5j/lg6fCy3ttlivYIT7ZR3gr33wAAAP//UV6dfg=="
+ return "eJy0WEtvGzcX3edXXGST7wOkQWM3RqFF0dQpGqFJG6RJtwpFXs2w5pATXtKy+usLcjgvz+jhSN0Y4uucc3lfHM/hDncL2Bnv/BrXyNwzACedwseTAolbWTlp9OIZwEaiEhR+AcxBsxIX4K2KYwC3q3AB5KzUeZqy+NVLi2IBznpMk31Q+DFNAvwlBZoXBBuvFHz++G7AEtVdkKcH2FDcS9zSgEKZswwJgMCN127AI5i7pCUvCCq/VpKKGvlZ8u7QrT+P/ZmAbo12TGoCbsrS6HguORrYPZOKrRWC1MCUArxH7aJsynrxMLAvAGTh52HxnwqMB8BswBUYFQKhFlLncUKZHEokYjlSBsvernhMUgtF6ILAsM6N3sjcWxboYCMVzsJ8WGQO7pny4SR4QhExpQtDbVwfLB6BwpBLTGn/JxOpBjpmYS1OfQnDLy2OiRbv15WNL61hPH5xrTZGYNF5q1HAehepTIWBRudAO3JYgtGwLSQvOuG9u7Nea6nzCTVOlviP0SeoaXb+l2ru0ZI0+riYtLEJqxjO0fk56iAFBbhCUh3K2TB0n/8UTCHHyur5IEV7OTuZoBtjS+YG+/CBlVVIvdc+9+Tg6sYVcPXdy5sZvLxaXL9avLrOrq+vTrvdKAm2dSBjSsOQIBa5sQK2jDr7HhnlWE6HWV7btXSW2V3cW98WZ6EUxHiv0NaOYlrEgbNME+Ou80d9T4+I6+owuEez/ht5k2v1YFWv3OFua6w4LLStVZ7QdjkVClRN9kgBWmvsQEBuja8Ok/wSDjUVkNeMIX6ZEDLsZQqk3piQ2ZxRrF+Rh7ImGHpdsq8mFbN2vtHk8MH1JvfI6qQlnGxEwI0Yo/ea2EnoAWQMHbBG0EOfnYReh0lqUVwZL7oedRuGUFlzLwUGMx0TzLHptvU+rcLGmrJGao9S8FVXgpgQq7hh1UCGnRyJjN3bxcLWLJ7KGtjHiY38SPb+3mtvQ4UZfDBEMgRu7EkEzGIAnEHOcQbGgpC5dEwZjkxne7VJTY5pjit5JHWWaSMs3zSSQhOBkvFC6sepO8VwvDO1HP2+fhpL2rDqxVl7z+4qK1FIXx5mf19DxBB7Gnl65kgl3W7Va3mtAk9zZOTmL/mRQtoDgtgRZdftJNVyJHVt7kDIxdrYerWVklbmD6eHXjoStPxqTK6wzrT97Bbzo632Y9xzzL6U6MLwu5g/KdPfNOMJ8HoNyDEXyq9SyEPPjmler4WcpcJYt6o7wAI2TFFwGtO8MLbhm7dZvufTpZUFk/1hXx1PPQFtJsV5NfGzll89doAgxVRVb+nKqfbxJMZ+XES45nWaBISHxNpL5cDoQ1J6xeAbldy2nAHrEJdia1Q0Yhu8JeDwe+KIlmW8iZqnDdoQzF3Ivq1HEyDL8BjoBWrocqPS08VmmD8amYn7aXF5vk/eps+KsTcuFOl1gZgIcmZ5IR1y5+0FbBjAwf8wyzN4+OFmdfP9DJgtZ1BVfAalrOj/YymGskoxF5705yn5409ogJIGjtoZmoFfe+38DLZSC7PdI2L4xfPtGhLOJMeGlVLtzqaoYZKRFkXB3AwEriXTM9hYxDWJQ9bKaiRhMHWA/Z0kFwra8sOcCWGRCGlMUDJ+npENTcGs2DKLHdkMPHmm1A7ev77ta2jqyJ1fo9XokLpq8lt/boK2W2+fwcM3bQcK/VpyuC12h44WoIFoeFIZqoy4QHvo3UBlRF3bJqn8uaWpx/TBCPi8fDMmCn+pYvxyRnWIY7LwBXbRGwyIe67w1OZ6GlGNBiWrxkxMa+Pi/78uRteDnOa85IOlx8sHb5dDtBd4sk3y1rj/BgAA///8ffLf"
}
diff --git a/logs/youtubebeat b/logs/youtubebeat
new file mode 100644
index 0000000..eeb62f1
--- /dev/null
+++ b/logs/youtubebeat
@@ -0,0 +1,8 @@
+2018-11-18T11:09:17.508+0100 INFO instance/beat.go:616 Home path: [/Users/gaugendre/Projects/golang/src/github.com/Crocmagnon/youtubebeat] Config path: [/Users/gaugendre/Projects/golang/src/github.com/Crocmagnon/youtubebeat] Data path: [/Users/gaugendre/Projects/golang/src/github.com/Crocmagnon/youtubebeat/data] Logs path: [/Users/gaugendre/Projects/golang/src/github.com/Crocmagnon/youtubebeat/logs]
+2018-11-18T11:09:17.509+0100 INFO instance/beat.go:623 Beat UUID: a5fddc65-b03a-4cde-b7a9-f884d277841d
+2018-11-18T11:09:17.509+0100 INFO [beat] instance/beat.go:849 Beat info {"system_info": {"beat": {"path": {"config": "/Users/gaugendre/Projects/golang/src/github.com/Crocmagnon/youtubebeat", "data": "/Users/gaugendre/Projects/golang/src/github.com/Crocmagnon/youtubebeat/data", "home": "/Users/gaugendre/Projects/golang/src/github.com/Crocmagnon/youtubebeat", "logs": "/Users/gaugendre/Projects/golang/src/github.com/Crocmagnon/youtubebeat/logs"}, "type": "youtubebeat", "uuid": "a5fddc65-b03a-4cde-b7a9-f884d277841d"}}}
+2018-11-18T11:09:17.509+0100 INFO [beat] instance/beat.go:858 Build info {"system_info": {"build": {"commit": "38fd4465358c195b17152b60e7e32e6ed0345528", "libbeat": "6.5.0", "time": "2018-11-18T10:08:58.000Z", "version": "6.5.0"}}}
+2018-11-18T11:09:17.509+0100 INFO [beat] instance/beat.go:861 Go runtime info {"system_info": {"go": {"os":"darwin","arch":"amd64","max_procs":8,"version":"go1.11.2"}}}
+2018-11-18T11:09:17.511+0100 INFO [beat] instance/beat.go:865 Host info {"system_info": {"host": {"architecture":"x86_64","boot_time":"2018-11-13T17:51:46.564974+01:00","name":"persephone.local","ip":["127.0.0.1/8","::1/128","fe80::1/64","fe80::1cb1:a370:e195:1b14/64","10.0.0.11/24","2a01:e34:ec0f:96a0:1c7d:59bf:1f3b:b4d9/64","2a01:e34:ec0f:96a0:bdde:b4b8:e13:d8da/64","fe80::d891:cff:fe4e:3f99/64","fe80::8aed:bb9d:8b99:8746/64","fe80::e055:800e:12a0:b862/64","fe80::4e1b:e233:d6da:218/64","fe80::aede:48ff:fe00:1122/64"],"kernel_version":"18.2.0","mac":["b6:00:34:c9:44:01","b6:00:34:c9:44:00","b6:00:34:c9:44:05","b6:00:34:c9:44:04","dc:a9:04:7d:4a:e4","0e:a9:04:7d:4a:e4","da:91:0c:4e:3f:99","b6:00:34:c9:44:01","ac:de:48:00:11:22"],"os":{"family":"darwin","platform":"darwin","name":"Mac OS X","version":"10.14.1","major":10,"minor":14,"patch":1,"build":"18B75"},"timezone":"CET","timezone_offset_sec":3600}}}
+2018-11-18T11:09:17.511+0100 INFO [beat] instance/beat.go:894 Process info {"system_info": {"process": {"cwd": "/Users/gaugendre/Projects/golang/src/github.com/Crocmagnon/youtubebeat", "exe": "./youtubebeat", "name": "youtubebeat", "pid": 75764, "ppid": 74760, "start_time": "2018-11-18T11:09:17.481+0100"}}}
+2018-11-18T11:09:17.511+0100 INFO instance/beat.go:302 Setup Beat: youtubebeat; Version: 6.5.0
diff --git a/vendor/github.com/PuerkitoBio/goquery/LICENSE b/vendor/github.com/PuerkitoBio/goquery/LICENSE
new file mode 100644
index 0000000..f743d37
--- /dev/null
+++ b/vendor/github.com/PuerkitoBio/goquery/LICENSE
@@ -0,0 +1,12 @@
+Copyright (c) 2012-2016, Martin Angers & Contributors
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+
+* Neither the name of the author nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/PuerkitoBio/goquery/README.md b/vendor/github.com/PuerkitoBio/goquery/README.md
new file mode 100644
index 0000000..84f9af3
--- /dev/null
+++ b/vendor/github.com/PuerkitoBio/goquery/README.md
@@ -0,0 +1,179 @@
+# goquery - a little like that j-thing, only in Go
+[![build status](https://secure.travis-ci.org/PuerkitoBio/goquery.svg?branch=master)](http://travis-ci.org/PuerkitoBio/goquery) [![GoDoc](https://godoc.org/github.com/PuerkitoBio/goquery?status.png)](http://godoc.org/github.com/PuerkitoBio/goquery) [![Sourcegraph Badge](https://sourcegraph.com/github.com/PuerkitoBio/goquery/-/badge.svg)](https://sourcegraph.com/github.com/PuerkitoBio/goquery?badge)
+
+goquery brings a syntax and a set of features similar to [jQuery][] to the [Go language][go]. It is based on Go's [net/html package][html] and the CSS Selector library [cascadia][]. Since the net/html parser returns nodes, and not a full-featured DOM tree, jQuery's stateful manipulation functions (like height(), css(), detach()) have been left off.
+
+Also, because the net/html parser requires UTF-8 encoding, so does goquery: it is the caller's responsibility to ensure that the source document provides UTF-8 encoded HTML. See the [wiki][] for various options to do this.
+
+Syntax-wise, it is as close as possible to jQuery, with the same function names when possible, and that warm and fuzzy chainable interface. jQuery being the ultra-popular library that it is, I felt that writing a similar HTML-manipulating library was better to follow its API than to start anew (in the same spirit as Go's `fmt` package), even though some of its methods are less than intuitive (looking at you, [index()][index]...).
+
+## Table of Contents
+
+* [Installation](#installation)
+* [Changelog](#changelog)
+* [API](#api)
+* [Examples](#examples)
+* [Related Projects](#related-projects)
+* [Support](#support)
+* [License](#license)
+
+## Installation
+
+Please note that because of the net/html dependency, goquery requires Go1.1+.
+
+ $ go get github.com/PuerkitoBio/goquery
+
+(optional) To run unit tests:
+
+ $ cd $GOPATH/src/github.com/PuerkitoBio/goquery
+ $ go test
+
+(optional) To run benchmarks (warning: it runs for a few minutes):
+
+ $ cd $GOPATH/src/github.com/PuerkitoBio/goquery
+ $ go test -bench=".*"
+
+## Changelog
+
+**Note that goquery's API is now stable, and will not break.**
+
+* **2018-11-15 (v1.5.0)** : Go module support (thanks @Zaba505).
+* **2018-06-07 (v1.4.1)** : Add `NewDocumentFromReader` examples.
+* **2018-03-24 (v1.4.0)** : Deprecate `NewDocument(url)` and `NewDocumentFromResponse(response)`.
+* **2018-01-28 (v1.3.0)** : Add `ToEnd` constant to `Slice` until the end of the selection (thanks to @davidjwilkins for raising the issue).
+* **2018-01-11 (v1.2.0)** : Add `AddBack*` and deprecate `AndSelf` (thanks to @davidjwilkins).
+* **2017-02-12 (v1.1.0)** : Add `SetHtml` and `SetText` (thanks to @glebtv).
+* **2016-12-29 (v1.0.2)** : Optimize allocations for `Selection.Text` (thanks to @radovskyb).
+* **2016-08-28 (v1.0.1)** : Optimize performance for large documents.
+* **2016-07-27 (v1.0.0)** : Tag version 1.0.0.
+* **2016-06-15** : Invalid selector strings internally compile to a `Matcher` implementation that never matches any node (instead of a panic). So for example, `doc.Find("~")` returns an empty `*Selection` object.
+* **2016-02-02** : Add `NodeName` utility function similar to the DOM's `nodeName` property. It returns the tag name of the first element in a selection, and other relevant values of non-element nodes (see godoc for details). Add `OuterHtml` utility function similar to the DOM's `outerHTML` property (named `OuterHtml` in small caps for consistency with the existing `Html` method on the `Selection`).
+* **2015-04-20** : Add `AttrOr` helper method to return the attribute's value or a default value if absent. Thanks to [piotrkowalczuk][piotr].
+* **2015-02-04** : Add more manipulation functions - Prepend* - thanks again to [Andrew Stone][thatguystone].
+* **2014-11-28** : Add more manipulation functions - ReplaceWith*, Wrap* and Unwrap - thanks again to [Andrew Stone][thatguystone].
+* **2014-11-07** : Add manipulation functions (thanks to [Andrew Stone][thatguystone]) and `*Matcher` functions, that receive compiled cascadia selectors instead of selector strings, thus avoiding potential panics thrown by goquery via `cascadia.MustCompile` calls. This results in better performance (selectors can be compiled once and reused) and more idiomatic error handling (you can handle cascadia's compilation errors, instead of recovering from panics, which had been bugging me for a long time). Note that the actual type expected is a `Matcher` interface, that `cascadia.Selector` implements. Other matcher implementations could be used.
+* **2014-11-06** : Change import paths of net/html to golang.org/x/net/html (see https://groups.google.com/forum/#!topic/golang-nuts/eD8dh3T9yyA). Make sure to update your code to use the new import path too when you call goquery with `html.Node`s.
+* **v0.3.2** : Add `NewDocumentFromReader()` (thanks jweir) which allows creating a goquery document from an io.Reader.
+* **v0.3.1** : Add `NewDocumentFromResponse()` (thanks assassingj) which allows creating a goquery document from an http response.
+* **v0.3.0** : Add `EachWithBreak()` which allows to break out of an `Each()` loop by returning false. This function was added instead of changing the existing `Each()` to avoid breaking compatibility.
+* **v0.2.1** : Make go-getable, now that [go.net/html is Go1.0-compatible][gonet] (thanks to @matrixik for pointing this out).
+* **v0.2.0** : Add support for negative indices in Slice(). **BREAKING CHANGE** `Document.Root` is removed, `Document` is now a `Selection` itself (a selection of one, the root element, just like `Document.Root` was before). Add jQuery's Closest() method.
+* **v0.1.1** : Add benchmarks to use as baseline for refactorings, refactor Next...() and Prev...() methods to use the new html package's linked list features (Next/PrevSibling, FirstChild). Good performance boost (40+% in some cases).
+* **v0.1.0** : Initial release.
+
+## API
+
+goquery exposes two structs, `Document` and `Selection`, and the `Matcher` interface. Unlike jQuery, which is loaded as part of a DOM document, and thus acts on its containing document, goquery doesn't know which HTML document to act upon. So it needs to be told, and that's what the `Document` type is for. It holds the root document node as the initial Selection value to manipulate.
+
+jQuery often has many variants for the same function (no argument, a selector string argument, a jQuery object argument, a DOM element argument, ...). Instead of exposing the same features in goquery as a single method with variadic empty interface arguments, statically-typed signatures are used following this naming convention:
+
+* When the jQuery equivalent can be called with no argument, it has the same name as jQuery for the no argument signature (e.g.: `Prev()`), and the version with a selector string argument is called `XxxFiltered()` (e.g.: `PrevFiltered()`)
+* When the jQuery equivalent **requires** one argument, the same name as jQuery is used for the selector string version (e.g.: `Is()`)
+* The signatures accepting a jQuery object as argument are defined in goquery as `XxxSelection()` and take a `*Selection` object as argument (e.g.: `FilterSelection()`)
+* The signatures accepting a DOM element as argument in jQuery are defined in goquery as `XxxNodes()` and take a variadic argument of type `*html.Node` (e.g.: `FilterNodes()`)
+* The signatures accepting a function as argument in jQuery are defined in goquery as `XxxFunction()` and take a function as argument (e.g.: `FilterFunction()`)
+* The goquery methods that can be called with a selector string have a corresponding version that take a `Matcher` interface and are defined as `XxxMatcher()` (e.g.: `IsMatcher()`)
+
+Utility functions that are not in jQuery but are useful in Go are implemented as functions (that take a `*Selection` as parameter), to avoid a potential naming clash on the `*Selection`'s methods (reserved for jQuery-equivalent behaviour).
+
+The complete [godoc reference documentation can be found here][doc].
+
+Please note that Cascadia's selectors do not necessarily match all supported selectors of jQuery (Sizzle). See the [cascadia project][cascadia] for details. Invalid selector strings compile to a `Matcher` that fails to match any node. Behaviour of the various functions that take a selector string as argument follows from that fact, e.g. (where `~` is an invalid selector string):
+
+* `Find("~")` returns an empty selection because the selector string doesn't match anything.
+* `Add("~")` returns a new selection that holds the same nodes as the original selection, because it didn't add any node (selector string didn't match anything).
+* `ParentsFiltered("~")` returns an empty selection because the selector string doesn't match anything.
+* `ParentsUntil("~")` returns all parents of the selection because the selector string didn't match any element to stop before the top element.
+
+## Examples
+
+See some tips and tricks in the [wiki][].
+
+Adapted from example_test.go:
+
+```Go
+package main
+
+import (
+ "fmt"
+ "log"
+ "net/http"
+
+ "github.com/PuerkitoBio/goquery"
+)
+
+func ExampleScrape() {
+ // Request the HTML page.
+ res, err := http.Get("http://metalsucks.net")
+ if err != nil {
+ log.Fatal(err)
+ }
+ defer res.Body.Close()
+ if res.StatusCode != 200 {
+ log.Fatalf("status code error: %d %s", res.StatusCode, res.Status)
+ }
+
+ // Load the HTML document
+ doc, err := goquery.NewDocumentFromReader(res.Body)
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ // Find the review items
+ doc.Find(".sidebar-reviews article .content-block").Each(func(i int, s *goquery.Selection) {
+ // For each item found, get the band and title
+ band := s.Find("a").Text()
+ title := s.Find("i").Text()
+ fmt.Printf("Review %d: %s - %s\n", i, band, title)
+ })
+}
+
+func main() {
+ ExampleScrape()
+}
+```
+
+## Related Projects
+
+- [Goq][goq], an HTML deserialization and scraping library based on goquery and struct tags.
+- [andybalholm/cascadia][cascadia], the CSS selector library used by goquery.
+- [suntong/cascadia][cascadiacli], a command-line interface to the cascadia CSS selector library, useful to test selectors.
+- [asciimoo/colly](https://github.com/asciimoo/colly), a lightning fast and elegant Scraping Framework
+- [gnulnx/goperf](https://github.com/gnulnx/goperf), a website performance test tool that also fetches static assets.
+- [MontFerret/ferret](https://github.com/MontFerret/ferret), declarative web scraping.
+
+## Support
+
+There are a number of ways you can support the project:
+
+* Use it, star it, build something with it, spread the word!
+ - If you do build something open-source or otherwise publicly-visible, let me know so I can add it to the [Related Projects](#related-projects) section!
+* Raise issues to improve the project (note: doc typos and clarifications are issues too!)
+ - Please search existing issues before opening a new one - it may have already been adressed.
+* Pull requests: please discuss new code in an issue first, unless the fix is really trivial.
+ - Make sure new code is tested.
+ - Be mindful of existing code - PRs that break existing code have a high probability of being declined, unless it fixes a serious issue.
+
+If you desperately want to send money my way, I have a BuyMeACoffee.com page:
+
+
+
+## License
+
+The [BSD 3-Clause license][bsd], the same as the [Go language][golic]. Cascadia's license is [here][caslic].
+
+[jquery]: http://jquery.com/
+[go]: http://golang.org/
+[cascadia]: https://github.com/andybalholm/cascadia
+[cascadiacli]: https://github.com/suntong/cascadia
+[bsd]: http://opensource.org/licenses/BSD-3-Clause
+[golic]: http://golang.org/LICENSE
+[caslic]: https://github.com/andybalholm/cascadia/blob/master/LICENSE
+[doc]: http://godoc.org/github.com/PuerkitoBio/goquery
+[index]: http://api.jquery.com/index/
+[gonet]: https://github.com/golang/net/
+[html]: http://godoc.org/golang.org/x/net/html
+[wiki]: https://github.com/PuerkitoBio/goquery/wiki/Tips-and-tricks
+[thatguystone]: https://github.com/thatguystone
+[piotr]: https://github.com/piotrkowalczuk
+[goq]: https://github.com/andrewstuart/goq
diff --git a/vendor/github.com/PuerkitoBio/goquery/array.go b/vendor/github.com/PuerkitoBio/goquery/array.go
new file mode 100644
index 0000000..1b1f6cb
--- /dev/null
+++ b/vendor/github.com/PuerkitoBio/goquery/array.go
@@ -0,0 +1,124 @@
+package goquery
+
+import (
+ "golang.org/x/net/html"
+)
+
+const (
+ maxUint = ^uint(0)
+ maxInt = int(maxUint >> 1)
+
+ // ToEnd is a special index value that can be used as end index in a call
+ // to Slice so that all elements are selected until the end of the Selection.
+ // It is equivalent to passing (*Selection).Length().
+ ToEnd = maxInt
+)
+
+// First reduces the set of matched elements to the first in the set.
+// It returns a new Selection object, and an empty Selection object if the
+// the selection is empty.
+func (s *Selection) First() *Selection {
+ return s.Eq(0)
+}
+
+// Last reduces the set of matched elements to the last in the set.
+// It returns a new Selection object, and an empty Selection object if
+// the selection is empty.
+func (s *Selection) Last() *Selection {
+ return s.Eq(-1)
+}
+
+// Eq reduces the set of matched elements to the one at the specified index.
+// If a negative index is given, it counts backwards starting at the end of the
+// set. It returns a new Selection object, and an empty Selection object if the
+// index is invalid.
+func (s *Selection) Eq(index int) *Selection {
+ if index < 0 {
+ index += len(s.Nodes)
+ }
+
+ if index >= len(s.Nodes) || index < 0 {
+ return newEmptySelection(s.document)
+ }
+
+ return s.Slice(index, index+1)
+}
+
+// Slice reduces the set of matched elements to a subset specified by a range
+// of indices. The start index is 0-based and indicates the index of the first
+// element to select. The end index is 0-based and indicates the index at which
+// the elements stop being selected (the end index is not selected).
+//
+// The indices may be negative, in which case they represent an offset from the
+// end of the selection.
+//
+// The special value ToEnd may be specified as end index, in which case all elements
+// until the end are selected. This works both for a positive and negative start
+// index.
+func (s *Selection) Slice(start, end int) *Selection {
+ if start < 0 {
+ start += len(s.Nodes)
+ }
+ if end == ToEnd {
+ end = len(s.Nodes)
+ } else if end < 0 {
+ end += len(s.Nodes)
+ }
+ return pushStack(s, s.Nodes[start:end])
+}
+
+// Get retrieves the underlying node at the specified index.
+// Get without parameter is not implemented, since the node array is available
+// on the Selection object.
+func (s *Selection) Get(index int) *html.Node {
+ if index < 0 {
+ index += len(s.Nodes) // Negative index gets from the end
+ }
+ return s.Nodes[index]
+}
+
+// Index returns the position of the first element within the Selection object
+// relative to its sibling elements.
+func (s *Selection) Index() int {
+ if len(s.Nodes) > 0 {
+ return newSingleSelection(s.Nodes[0], s.document).PrevAll().Length()
+ }
+ return -1
+}
+
+// IndexSelector returns the position of the first element within the
+// Selection object relative to the elements matched by the selector, or -1 if
+// not found.
+func (s *Selection) IndexSelector(selector string) int {
+ if len(s.Nodes) > 0 {
+ sel := s.document.Find(selector)
+ return indexInSlice(sel.Nodes, s.Nodes[0])
+ }
+ return -1
+}
+
+// IndexMatcher returns the position of the first element within the
+// Selection object relative to the elements matched by the matcher, or -1 if
+// not found.
+func (s *Selection) IndexMatcher(m Matcher) int {
+ if len(s.Nodes) > 0 {
+ sel := s.document.FindMatcher(m)
+ return indexInSlice(sel.Nodes, s.Nodes[0])
+ }
+ return -1
+}
+
+// IndexOfNode returns the position of the specified node within the Selection
+// object, or -1 if not found.
+func (s *Selection) IndexOfNode(node *html.Node) int {
+ return indexInSlice(s.Nodes, node)
+}
+
+// IndexOfSelection returns the position of the first node in the specified
+// Selection object within this Selection object, or -1 if not found.
+func (s *Selection) IndexOfSelection(sel *Selection) int {
+ if sel != nil && len(sel.Nodes) > 0 {
+ return indexInSlice(s.Nodes, sel.Nodes[0])
+ }
+ return -1
+}
diff --git a/vendor/github.com/PuerkitoBio/goquery/array_test.go b/vendor/github.com/PuerkitoBio/goquery/array_test.go
new file mode 100644
index 0000000..7857b38
--- /dev/null
+++ b/vendor/github.com/PuerkitoBio/goquery/array_test.go
@@ -0,0 +1,234 @@
+package goquery
+
+import (
+ "testing"
+)
+
+func TestFirst(t *testing.T) {
+ sel := Doc().Find(".pvk-content").First()
+ assertLength(t, sel.Nodes, 1)
+}
+
+func TestFirstEmpty(t *testing.T) {
+ sel := Doc().Find(".pvk-zzcontentzz").First()
+ assertLength(t, sel.Nodes, 0)
+}
+
+func TestFirstInvalid(t *testing.T) {
+ sel := Doc().Find("").First()
+ assertLength(t, sel.Nodes, 0)
+}
+
+func TestFirstRollback(t *testing.T) {
+ sel := Doc().Find(".pvk-content")
+ sel2 := sel.First().End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestLast(t *testing.T) {
+ sel := Doc().Find(".pvk-content").Last()
+ assertLength(t, sel.Nodes, 1)
+
+ // Should contain Footer
+ foot := Doc().Find(".footer")
+ if !sel.Contains(foot.Nodes[0]) {
+ t.Error("Last .pvk-content should contain .footer.")
+ }
+}
+
+func TestLastEmpty(t *testing.T) {
+ sel := Doc().Find(".pvk-zzcontentzz").Last()
+ assertLength(t, sel.Nodes, 0)
+}
+
+func TestLastInvalid(t *testing.T) {
+ sel := Doc().Find("").Last()
+ assertLength(t, sel.Nodes, 0)
+}
+
+func TestLastRollback(t *testing.T) {
+ sel := Doc().Find(".pvk-content")
+ sel2 := sel.Last().End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestEq(t *testing.T) {
+ sel := Doc().Find(".pvk-content").Eq(1)
+ assertLength(t, sel.Nodes, 1)
+}
+
+func TestEqNegative(t *testing.T) {
+ sel := Doc().Find(".pvk-content").Eq(-1)
+ assertLength(t, sel.Nodes, 1)
+
+ // Should contain Footer
+ foot := Doc().Find(".footer")
+ if !sel.Contains(foot.Nodes[0]) {
+ t.Error("Index -1 of .pvk-content should contain .footer.")
+ }
+}
+
+func TestEqEmpty(t *testing.T) {
+ sel := Doc().Find("something_random_that_does_not_exists").Eq(0)
+ assertLength(t, sel.Nodes, 0)
+}
+
+func TestEqInvalid(t *testing.T) {
+ sel := Doc().Find("").Eq(0)
+ assertLength(t, sel.Nodes, 0)
+}
+
+func TestEqInvalidPositive(t *testing.T) {
+ sel := Doc().Find(".pvk-content").Eq(3)
+ assertLength(t, sel.Nodes, 0)
+}
+
+func TestEqInvalidNegative(t *testing.T) {
+ sel := Doc().Find(".pvk-content").Eq(-4)
+ assertLength(t, sel.Nodes, 0)
+}
+
+func TestEqRollback(t *testing.T) {
+ sel := Doc().Find(".pvk-content")
+ sel2 := sel.Eq(1).End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestSlice(t *testing.T) {
+ sel := Doc().Find(".pvk-content").Slice(0, 2)
+
+ assertLength(t, sel.Nodes, 2)
+ assertSelectionIs(t, sel, "#pc1", "#pc2")
+}
+
+func TestSliceToEnd(t *testing.T) {
+ sel := Doc().Find(".pvk-content").Slice(1, ToEnd)
+
+ assertLength(t, sel.Nodes, 2)
+ assertSelectionIs(t, sel.Eq(0), "#pc2")
+ if _, ok := sel.Eq(1).Attr("id"); ok {
+ t.Error("Want no attribute ID, got one")
+ }
+}
+
+func TestSliceEmpty(t *testing.T) {
+ defer assertPanic(t)
+ Doc().Find("x").Slice(0, 2)
+}
+
+func TestSliceInvalid(t *testing.T) {
+ defer assertPanic(t)
+ Doc().Find("").Slice(0, 2)
+}
+
+func TestSliceInvalidToEnd(t *testing.T) {
+ defer assertPanic(t)
+ Doc().Find("").Slice(2, ToEnd)
+}
+
+func TestSliceOutOfBounds(t *testing.T) {
+ defer assertPanic(t)
+ Doc().Find(".pvk-content").Slice(2, 12)
+}
+
+func TestNegativeSliceStart(t *testing.T) {
+ sel := Doc().Find(".container-fluid").Slice(-2, 3)
+ assertLength(t, sel.Nodes, 1)
+ assertSelectionIs(t, sel.Eq(0), "#cf3")
+}
+
+func TestNegativeSliceEnd(t *testing.T) {
+ sel := Doc().Find(".container-fluid").Slice(1, -1)
+ assertLength(t, sel.Nodes, 2)
+ assertSelectionIs(t, sel.Eq(0), "#cf2")
+ assertSelectionIs(t, sel.Eq(1), "#cf3")
+}
+
+func TestNegativeSliceBoth(t *testing.T) {
+ sel := Doc().Find(".container-fluid").Slice(-3, -1)
+ assertLength(t, sel.Nodes, 2)
+ assertSelectionIs(t, sel.Eq(0), "#cf2")
+ assertSelectionIs(t, sel.Eq(1), "#cf3")
+}
+
+func TestNegativeSliceToEnd(t *testing.T) {
+ sel := Doc().Find(".container-fluid").Slice(-3, ToEnd)
+ assertLength(t, sel.Nodes, 3)
+ assertSelectionIs(t, sel, "#cf2", "#cf3", "#cf4")
+}
+
+func TestNegativeSliceOutOfBounds(t *testing.T) {
+ defer assertPanic(t)
+ Doc().Find(".container-fluid").Slice(-12, -7)
+}
+
+func TestSliceRollback(t *testing.T) {
+ sel := Doc().Find(".pvk-content")
+ sel2 := sel.Slice(0, 2).End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestGet(t *testing.T) {
+ sel := Doc().Find(".pvk-content")
+ node := sel.Get(1)
+ if sel.Nodes[1] != node {
+ t.Errorf("Expected node %v to be %v.", node, sel.Nodes[1])
+ }
+}
+
+func TestGetNegative(t *testing.T) {
+ sel := Doc().Find(".pvk-content")
+ node := sel.Get(-3)
+ if sel.Nodes[0] != node {
+ t.Errorf("Expected node %v to be %v.", node, sel.Nodes[0])
+ }
+}
+
+func TestGetInvalid(t *testing.T) {
+ defer assertPanic(t)
+ sel := Doc().Find(".pvk-content")
+ sel.Get(129)
+}
+
+func TestIndex(t *testing.T) {
+ sel := Doc().Find(".pvk-content")
+ if i := sel.Index(); i != 1 {
+ t.Errorf("Expected index of 1, got %v.", i)
+ }
+}
+
+func TestIndexSelector(t *testing.T) {
+ sel := Doc().Find(".hero-unit")
+ if i := sel.IndexSelector("div"); i != 4 {
+ t.Errorf("Expected index of 4, got %v.", i)
+ }
+}
+
+func TestIndexSelectorInvalid(t *testing.T) {
+ sel := Doc().Find(".hero-unit")
+ if i := sel.IndexSelector(""); i != -1 {
+ t.Errorf("Expected index of -1, got %v.", i)
+ }
+}
+
+func TestIndexOfNode(t *testing.T) {
+ sel := Doc().Find("div.pvk-gutter")
+ if i := sel.IndexOfNode(sel.Nodes[1]); i != 1 {
+ t.Errorf("Expected index of 1, got %v.", i)
+ }
+}
+
+func TestIndexOfNilNode(t *testing.T) {
+ sel := Doc().Find("div.pvk-gutter")
+ if i := sel.IndexOfNode(nil); i != -1 {
+ t.Errorf("Expected index of -1, got %v.", i)
+ }
+}
+
+func TestIndexOfSelection(t *testing.T) {
+ sel := Doc().Find("div")
+ sel2 := Doc().Find(".hero-unit")
+ if i := sel.IndexOfSelection(sel2); i != 4 {
+ t.Errorf("Expected index of 4, got %v.", i)
+ }
+}
diff --git a/vendor/github.com/PuerkitoBio/goquery/bench_array_test.go b/vendor/github.com/PuerkitoBio/goquery/bench_array_test.go
new file mode 100644
index 0000000..29c7e20
--- /dev/null
+++ b/vendor/github.com/PuerkitoBio/goquery/bench_array_test.go
@@ -0,0 +1,120 @@
+package goquery
+
+import (
+ "testing"
+)
+
+func BenchmarkFirst(b *testing.B) {
+ b.StopTimer()
+ sel := DocB().Find("dd")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ sel.First()
+ }
+}
+
+func BenchmarkLast(b *testing.B) {
+ b.StopTimer()
+ sel := DocB().Find("dd")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ sel.Last()
+ }
+}
+
+func BenchmarkEq(b *testing.B) {
+ b.StopTimer()
+ sel := DocB().Find("dd")
+ j := 0
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ sel.Eq(j)
+ if j++; j >= sel.Length() {
+ j = 0
+ }
+ }
+}
+
+func BenchmarkSlice(b *testing.B) {
+ b.StopTimer()
+ sel := DocB().Find("dd")
+ j := 0
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ sel.Slice(j, j+4)
+ if j++; j >= (sel.Length() - 4) {
+ j = 0
+ }
+ }
+}
+
+func BenchmarkGet(b *testing.B) {
+ b.StopTimer()
+ sel := DocB().Find("dd")
+ j := 0
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ sel.Get(j)
+ if j++; j >= sel.Length() {
+ j = 0
+ }
+ }
+}
+
+func BenchmarkIndex(b *testing.B) {
+ var j int
+
+ b.StopTimer()
+ sel := DocB().Find("#Main")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ j = sel.Index()
+ }
+ if j != 3 {
+ b.Fatalf("want 3, got %d", j)
+ }
+}
+
+func BenchmarkIndexSelector(b *testing.B) {
+ var j int
+
+ b.StopTimer()
+ sel := DocB().Find("#manual-nav dl dd:nth-child(1)")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ j = sel.IndexSelector("dd")
+ }
+ if j != 4 {
+ b.Fatalf("want 4, got %d", j)
+ }
+}
+
+func BenchmarkIndexOfNode(b *testing.B) {
+ var j int
+
+ b.StopTimer()
+ sel := DocB().Find("span a")
+ sel2 := DocB().Find("span a:nth-child(3)")
+ n := sel2.Get(0)
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ j = sel.IndexOfNode(n)
+ }
+ if j != 2 {
+ b.Fatalf("want 2, got %d", j)
+ }
+}
+
+func BenchmarkIndexOfSelection(b *testing.B) {
+ var j int
+ b.StopTimer()
+ sel := DocB().Find("span a")
+ sel2 := DocB().Find("span a:nth-child(3)")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ j = sel.IndexOfSelection(sel2)
+ }
+ if j != 2 {
+ b.Fatalf("want 2, got %d", j)
+ }
+}
diff --git a/vendor/github.com/PuerkitoBio/goquery/bench_example_test.go b/vendor/github.com/PuerkitoBio/goquery/bench_example_test.go
new file mode 100644
index 0000000..ba9ebe5
--- /dev/null
+++ b/vendor/github.com/PuerkitoBio/goquery/bench_example_test.go
@@ -0,0 +1,40 @@
+package goquery
+
+import (
+ "bytes"
+ "fmt"
+ "strconv"
+ "testing"
+)
+
+func BenchmarkMetalReviewExample(b *testing.B) {
+ var n int
+ var buf bytes.Buffer
+
+ b.StopTimer()
+ doc := loadDoc("metalreview.html")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ doc.Find(".slider-row:nth-child(1) .slider-item").Each(func(i int, s *Selection) {
+ var band, title string
+ var score float64
+ var e error
+
+ n++
+ // For each item found, get the band, title and score, and print it
+ band = s.Find("strong").Text()
+ title = s.Find("em").Text()
+ if score, e = strconv.ParseFloat(s.Find(".score").Text(), 64); e != nil {
+ // Not a valid float, ignore score
+ if n <= 4 {
+ buf.WriteString(fmt.Sprintf("Review %d: %s - %s.\n", i, band, title))
+ }
+ } else {
+ // Print all, including score
+ if n <= 4 {
+ buf.WriteString(fmt.Sprintf("Review %d: %s - %s (%2.1f).\n", i, band, title, score))
+ }
+ }
+ })
+ }
+}
diff --git a/vendor/github.com/PuerkitoBio/goquery/bench_expand_test.go b/vendor/github.com/PuerkitoBio/goquery/bench_expand_test.go
new file mode 100644
index 0000000..61f1947
--- /dev/null
+++ b/vendor/github.com/PuerkitoBio/goquery/bench_expand_test.go
@@ -0,0 +1,104 @@
+package goquery
+
+import (
+ "testing"
+)
+
+func BenchmarkAdd(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocB().Find("dd")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.Add("h2[title]").Length()
+ } else {
+ sel.Add("h2[title]")
+ }
+ }
+ if n != 43 {
+ b.Fatalf("want 43, got %d", n)
+ }
+}
+
+func BenchmarkAddSelection(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocB().Find("dd")
+ sel2 := DocB().Find("h2[title]")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.AddSelection(sel2).Length()
+ } else {
+ sel.AddSelection(sel2)
+ }
+ }
+ if n != 43 {
+ b.Fatalf("want 43, got %d", n)
+ }
+}
+
+func BenchmarkAddNodes(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocB().Find("dd")
+ sel2 := DocB().Find("h2[title]")
+ nodes := sel2.Nodes
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.AddNodes(nodes...).Length()
+ } else {
+ sel.AddNodes(nodes...)
+ }
+ }
+ if n != 43 {
+ b.Fatalf("want 43, got %d", n)
+ }
+}
+
+func BenchmarkAddNodesBig(b *testing.B) {
+ var n int
+
+ doc := DocW()
+ sel := doc.Find("li")
+ // make nodes > 1000
+ nodes := sel.Nodes
+ nodes = append(nodes, nodes...)
+ nodes = append(nodes, nodes...)
+ sel = doc.Find("xyz")
+ b.ResetTimer()
+
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.AddNodes(nodes...).Length()
+ } else {
+ sel.AddNodes(nodes...)
+ }
+ }
+ if n != 373 {
+ b.Fatalf("want 373, got %d", n)
+ }
+}
+
+func BenchmarkAndSelf(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocB().Find("dd").Parent()
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.AndSelf().Length()
+ } else {
+ sel.AndSelf()
+ }
+ }
+ if n != 44 {
+ b.Fatalf("want 44, got %d", n)
+ }
+}
diff --git a/vendor/github.com/PuerkitoBio/goquery/bench_filter_test.go b/vendor/github.com/PuerkitoBio/goquery/bench_filter_test.go
new file mode 100644
index 0000000..38e39f5
--- /dev/null
+++ b/vendor/github.com/PuerkitoBio/goquery/bench_filter_test.go
@@ -0,0 +1,236 @@
+package goquery
+
+import (
+ "testing"
+)
+
+func BenchmarkFilter(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("li")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.Filter(".toclevel-1").Length()
+ } else {
+ sel.Filter(".toclevel-1")
+ }
+ }
+ if n != 13 {
+ b.Fatalf("want 13, got %d", n)
+ }
+}
+
+func BenchmarkNot(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("li")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.Not(".toclevel-2").Length()
+ } else {
+ sel.Filter(".toclevel-2")
+ }
+ }
+ if n != 371 {
+ b.Fatalf("want 371, got %d", n)
+ }
+}
+
+func BenchmarkFilterFunction(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("li")
+ f := func(i int, s *Selection) bool {
+ return len(s.Get(0).Attr) > 0
+ }
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.FilterFunction(f).Length()
+ } else {
+ sel.FilterFunction(f)
+ }
+ }
+ if n != 112 {
+ b.Fatalf("want 112, got %d", n)
+ }
+}
+
+func BenchmarkNotFunction(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("li")
+ f := func(i int, s *Selection) bool {
+ return len(s.Get(0).Attr) > 0
+ }
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.NotFunction(f).Length()
+ } else {
+ sel.NotFunction(f)
+ }
+ }
+ if n != 261 {
+ b.Fatalf("want 261, got %d", n)
+ }
+}
+
+func BenchmarkFilterNodes(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("li")
+ sel2 := DocW().Find(".toclevel-2")
+ nodes := sel2.Nodes
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.FilterNodes(nodes...).Length()
+ } else {
+ sel.FilterNodes(nodes...)
+ }
+ }
+ if n != 2 {
+ b.Fatalf("want 2, got %d", n)
+ }
+}
+
+func BenchmarkNotNodes(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("li")
+ sel2 := DocW().Find(".toclevel-1")
+ nodes := sel2.Nodes
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.NotNodes(nodes...).Length()
+ } else {
+ sel.NotNodes(nodes...)
+ }
+ }
+ if n != 360 {
+ b.Fatalf("want 360, got %d", n)
+ }
+}
+
+func BenchmarkFilterSelection(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("li")
+ sel2 := DocW().Find(".toclevel-2")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.FilterSelection(sel2).Length()
+ } else {
+ sel.FilterSelection(sel2)
+ }
+ }
+ if n != 2 {
+ b.Fatalf("want 2, got %d", n)
+ }
+}
+
+func BenchmarkNotSelection(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("li")
+ sel2 := DocW().Find(".toclevel-1")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.NotSelection(sel2).Length()
+ } else {
+ sel.NotSelection(sel2)
+ }
+ }
+ if n != 360 {
+ b.Fatalf("want 360, got %d", n)
+ }
+}
+
+func BenchmarkHas(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("h2")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.Has(".editsection").Length()
+ } else {
+ sel.Has(".editsection")
+ }
+ }
+ if n != 13 {
+ b.Fatalf("want 13, got %d", n)
+ }
+}
+
+func BenchmarkHasNodes(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("li")
+ sel2 := DocW().Find(".tocnumber")
+ nodes := sel2.Nodes
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.HasNodes(nodes...).Length()
+ } else {
+ sel.HasNodes(nodes...)
+ }
+ }
+ if n != 15 {
+ b.Fatalf("want 15, got %d", n)
+ }
+}
+
+func BenchmarkHasSelection(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("li")
+ sel2 := DocW().Find(".tocnumber")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.HasSelection(sel2).Length()
+ } else {
+ sel.HasSelection(sel2)
+ }
+ }
+ if n != 15 {
+ b.Fatalf("want 15, got %d", n)
+ }
+}
+
+func BenchmarkEnd(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("li").Has(".tocnumber")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.End().Length()
+ } else {
+ sel.End()
+ }
+ }
+ if n != 373 {
+ b.Fatalf("want 373, got %d", n)
+ }
+}
diff --git a/vendor/github.com/PuerkitoBio/goquery/bench_iteration_test.go b/vendor/github.com/PuerkitoBio/goquery/bench_iteration_test.go
new file mode 100644
index 0000000..39445b0
--- /dev/null
+++ b/vendor/github.com/PuerkitoBio/goquery/bench_iteration_test.go
@@ -0,0 +1,68 @@
+package goquery
+
+import (
+ "testing"
+)
+
+func BenchmarkEach(b *testing.B) {
+ var tmp, n int
+
+ b.StopTimer()
+ sel := DocW().Find("td")
+ f := func(i int, s *Selection) {
+ tmp++
+ }
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ sel.Each(f)
+ if n == 0 {
+ n = tmp
+ }
+ }
+ if n != 59 {
+ b.Fatalf("want 59, got %d", n)
+ }
+}
+
+func BenchmarkMap(b *testing.B) {
+ var tmp, n int
+
+ b.StopTimer()
+ sel := DocW().Find("td")
+ f := func(i int, s *Selection) string {
+ tmp++
+ return string(tmp)
+ }
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ sel.Map(f)
+ if n == 0 {
+ n = tmp
+ }
+ }
+ if n != 59 {
+ b.Fatalf("want 59, got %d", n)
+ }
+}
+
+func BenchmarkEachWithBreak(b *testing.B) {
+ var tmp, n int
+
+ b.StopTimer()
+ sel := DocW().Find("td")
+ f := func(i int, s *Selection) bool {
+ tmp++
+ return tmp < 10
+ }
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ tmp = 0
+ sel.EachWithBreak(f)
+ if n == 0 {
+ n = tmp
+ }
+ }
+ if n != 10 {
+ b.Fatalf("want 10, got %d", n)
+ }
+}
diff --git a/vendor/github.com/PuerkitoBio/goquery/bench_property_test.go b/vendor/github.com/PuerkitoBio/goquery/bench_property_test.go
new file mode 100644
index 0000000..8acf5bf
--- /dev/null
+++ b/vendor/github.com/PuerkitoBio/goquery/bench_property_test.go
@@ -0,0 +1,51 @@
+package goquery
+
+import (
+ "testing"
+)
+
+func BenchmarkAttr(b *testing.B) {
+ var s string
+
+ b.StopTimer()
+ sel := DocW().Find("h1")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ s, _ = sel.Attr("id")
+ }
+ if s != "firstHeading" {
+ b.Fatalf("want firstHeading, got %q", s)
+ }
+}
+
+func BenchmarkText(b *testing.B) {
+ b.StopTimer()
+ sel := DocW().Find("h2")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ sel.Text()
+ }
+}
+
+func BenchmarkLength(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("h2")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ n = sel.Length()
+ }
+ if n != 14 {
+ b.Fatalf("want 14, got %d", n)
+ }
+}
+
+func BenchmarkHtml(b *testing.B) {
+ b.StopTimer()
+ sel := DocW().Find("h2")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ sel.Html()
+ }
+}
diff --git a/vendor/github.com/PuerkitoBio/goquery/bench_query_test.go b/vendor/github.com/PuerkitoBio/goquery/bench_query_test.go
new file mode 100644
index 0000000..64fdbc4
--- /dev/null
+++ b/vendor/github.com/PuerkitoBio/goquery/bench_query_test.go
@@ -0,0 +1,111 @@
+package goquery
+
+import (
+ "testing"
+)
+
+func BenchmarkIs(b *testing.B) {
+ var y bool
+
+ b.StopTimer()
+ sel := DocW().Find("li")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ y = sel.Is(".toclevel-2")
+ }
+ if !y {
+ b.Fatal("want true")
+ }
+}
+
+func BenchmarkIsPositional(b *testing.B) {
+ var y bool
+
+ b.StopTimer()
+ sel := DocW().Find("li")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ y = sel.Is("li:nth-child(2)")
+ }
+ if !y {
+ b.Fatal("want true")
+ }
+}
+
+func BenchmarkIsFunction(b *testing.B) {
+ var y bool
+
+ b.StopTimer()
+ sel := DocW().Find(".toclevel-1")
+ f := func(i int, s *Selection) bool {
+ return i == 8
+ }
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ y = sel.IsFunction(f)
+ }
+ if !y {
+ b.Fatal("want true")
+ }
+}
+
+func BenchmarkIsSelection(b *testing.B) {
+ var y bool
+
+ b.StopTimer()
+ sel := DocW().Find("li")
+ sel2 := DocW().Find(".toclevel-2")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ y = sel.IsSelection(sel2)
+ }
+ if !y {
+ b.Fatal("want true")
+ }
+}
+
+func BenchmarkIsNodes(b *testing.B) {
+ var y bool
+
+ b.StopTimer()
+ sel := DocW().Find("li")
+ sel2 := DocW().Find(".toclevel-2")
+ nodes := sel2.Nodes
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ y = sel.IsNodes(nodes...)
+ }
+ if !y {
+ b.Fatal("want true")
+ }
+}
+
+func BenchmarkHasClass(b *testing.B) {
+ var y bool
+
+ b.StopTimer()
+ sel := DocW().Find("span")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ y = sel.HasClass("official")
+ }
+ if !y {
+ b.Fatal("want true")
+ }
+}
+
+func BenchmarkContains(b *testing.B) {
+ var y bool
+
+ b.StopTimer()
+ sel := DocW().Find("span.url")
+ sel2 := DocW().Find("a[rel=\"nofollow\"]")
+ node := sel2.Nodes[0]
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ y = sel.Contains(node)
+ }
+ if !y {
+ b.Fatal("want true")
+ }
+}
diff --git a/vendor/github.com/PuerkitoBio/goquery/bench_traversal_test.go b/vendor/github.com/PuerkitoBio/goquery/bench_traversal_test.go
new file mode 100644
index 0000000..de84bcd
--- /dev/null
+++ b/vendor/github.com/PuerkitoBio/goquery/bench_traversal_test.go
@@ -0,0 +1,802 @@
+package goquery
+
+import (
+ "testing"
+)
+
+func BenchmarkFind(b *testing.B) {
+ var n int
+
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = DocB().Find("dd").Length()
+
+ } else {
+ DocB().Find("dd")
+ }
+ }
+ if n != 41 {
+ b.Fatalf("want 41, got %d", n)
+ }
+}
+
+func BenchmarkFindWithinSelection(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("ul")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.Find("a[class]").Length()
+ } else {
+ sel.Find("a[class]")
+ }
+ }
+ if n != 39 {
+ b.Fatalf("want 39, got %d", n)
+ }
+}
+
+func BenchmarkFindSelection(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("ul")
+ sel2 := DocW().Find("span")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.FindSelection(sel2).Length()
+ } else {
+ sel.FindSelection(sel2)
+ }
+ }
+ if n != 73 {
+ b.Fatalf("want 73, got %d", n)
+ }
+}
+
+func BenchmarkFindNodes(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("ul")
+ sel2 := DocW().Find("span")
+ nodes := sel2.Nodes
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.FindNodes(nodes...).Length()
+ } else {
+ sel.FindNodes(nodes...)
+ }
+ }
+ if n != 73 {
+ b.Fatalf("want 73, got %d", n)
+ }
+}
+
+func BenchmarkContents(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find(".toclevel-1")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.Contents().Length()
+ } else {
+ sel.Contents()
+ }
+ }
+ if n != 16 {
+ b.Fatalf("want 16, got %d", n)
+ }
+}
+
+func BenchmarkContentsFiltered(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find(".toclevel-1")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.ContentsFiltered("a[href=\"#Examples\"]").Length()
+ } else {
+ sel.ContentsFiltered("a[href=\"#Examples\"]")
+ }
+ }
+ if n != 1 {
+ b.Fatalf("want 1, got %d", n)
+ }
+}
+
+func BenchmarkChildren(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find(".toclevel-2")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.Children().Length()
+ } else {
+ sel.Children()
+ }
+ }
+ if n != 2 {
+ b.Fatalf("want 2, got %d", n)
+ }
+}
+
+func BenchmarkChildrenFiltered(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("h3")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.ChildrenFiltered(".editsection").Length()
+ } else {
+ sel.ChildrenFiltered(".editsection")
+ }
+ }
+ if n != 2 {
+ b.Fatalf("want 2, got %d", n)
+ }
+}
+
+func BenchmarkParent(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("li")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.Parent().Length()
+ } else {
+ sel.Parent()
+ }
+ }
+ if n != 55 {
+ b.Fatalf("want 55, got %d", n)
+ }
+}
+
+func BenchmarkParentFiltered(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("li")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.ParentFiltered("ul[id]").Length()
+ } else {
+ sel.ParentFiltered("ul[id]")
+ }
+ }
+ if n != 4 {
+ b.Fatalf("want 4, got %d", n)
+ }
+}
+
+func BenchmarkParents(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("th a")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.Parents().Length()
+ } else {
+ sel.Parents()
+ }
+ }
+ if n != 73 {
+ b.Fatalf("want 73, got %d", n)
+ }
+}
+
+func BenchmarkParentsFiltered(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("th a")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.ParentsFiltered("tr").Length()
+ } else {
+ sel.ParentsFiltered("tr")
+ }
+ }
+ if n != 18 {
+ b.Fatalf("want 18, got %d", n)
+ }
+}
+
+func BenchmarkParentsUntil(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("th a")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.ParentsUntil("table").Length()
+ } else {
+ sel.ParentsUntil("table")
+ }
+ }
+ if n != 52 {
+ b.Fatalf("want 52, got %d", n)
+ }
+}
+
+func BenchmarkParentsUntilSelection(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("th a")
+ sel2 := DocW().Find("#content")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.ParentsUntilSelection(sel2).Length()
+ } else {
+ sel.ParentsUntilSelection(sel2)
+ }
+ }
+ if n != 70 {
+ b.Fatalf("want 70, got %d", n)
+ }
+}
+
+func BenchmarkParentsUntilNodes(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("th a")
+ sel2 := DocW().Find("#content")
+ nodes := sel2.Nodes
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.ParentsUntilNodes(nodes...).Length()
+ } else {
+ sel.ParentsUntilNodes(nodes...)
+ }
+ }
+ if n != 70 {
+ b.Fatalf("want 70, got %d", n)
+ }
+}
+
+func BenchmarkParentsFilteredUntil(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find(".toclevel-1 a")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.ParentsFilteredUntil(":nth-child(1)", "ul").Length()
+ } else {
+ sel.ParentsFilteredUntil(":nth-child(1)", "ul")
+ }
+ }
+ if n != 2 {
+ b.Fatalf("want 2, got %d", n)
+ }
+}
+
+func BenchmarkParentsFilteredUntilSelection(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find(".toclevel-1 a")
+ sel2 := DocW().Find("ul")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.ParentsFilteredUntilSelection(":nth-child(1)", sel2).Length()
+ } else {
+ sel.ParentsFilteredUntilSelection(":nth-child(1)", sel2)
+ }
+ }
+ if n != 2 {
+ b.Fatalf("want 2, got %d", n)
+ }
+}
+
+func BenchmarkParentsFilteredUntilNodes(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find(".toclevel-1 a")
+ sel2 := DocW().Find("ul")
+ nodes := sel2.Nodes
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.ParentsFilteredUntilNodes(":nth-child(1)", nodes...).Length()
+ } else {
+ sel.ParentsFilteredUntilNodes(":nth-child(1)", nodes...)
+ }
+ }
+ if n != 2 {
+ b.Fatalf("want 2, got %d", n)
+ }
+}
+
+func BenchmarkSiblings(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("ul li:nth-child(1)")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.Siblings().Length()
+ } else {
+ sel.Siblings()
+ }
+ }
+ if n != 293 {
+ b.Fatalf("want 293, got %d", n)
+ }
+}
+
+func BenchmarkSiblingsFiltered(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("ul li:nth-child(1)")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.SiblingsFiltered("[class]").Length()
+ } else {
+ sel.SiblingsFiltered("[class]")
+ }
+ }
+ if n != 46 {
+ b.Fatalf("want 46, got %d", n)
+ }
+}
+
+func BenchmarkNext(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("li:nth-child(1)")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.Next().Length()
+ } else {
+ sel.Next()
+ }
+ }
+ if n != 49 {
+ b.Fatalf("want 49, got %d", n)
+ }
+}
+
+func BenchmarkNextFiltered(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("li:nth-child(1)")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.NextFiltered("[class]").Length()
+ } else {
+ sel.NextFiltered("[class]")
+ }
+ }
+ if n != 6 {
+ b.Fatalf("want 6, got %d", n)
+ }
+}
+
+func BenchmarkNextAll(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("li:nth-child(3)")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.NextAll().Length()
+ } else {
+ sel.NextAll()
+ }
+ }
+ if n != 234 {
+ b.Fatalf("want 234, got %d", n)
+ }
+}
+
+func BenchmarkNextAllFiltered(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("li:nth-child(3)")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.NextAllFiltered("[class]").Length()
+ } else {
+ sel.NextAllFiltered("[class]")
+ }
+ }
+ if n != 33 {
+ b.Fatalf("want 33, got %d", n)
+ }
+}
+
+func BenchmarkPrev(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("li:last-child")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.Prev().Length()
+ } else {
+ sel.Prev()
+ }
+ }
+ if n != 49 {
+ b.Fatalf("want 49, got %d", n)
+ }
+}
+
+func BenchmarkPrevFiltered(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("li:last-child")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.PrevFiltered("[class]").Length()
+ } else {
+ sel.PrevFiltered("[class]")
+ }
+ }
+ // There is one more Prev li with a class, compared to Next li with a class
+ // (confirmed by looking at the HTML, this is ok)
+ if n != 7 {
+ b.Fatalf("want 7, got %d", n)
+ }
+}
+
+func BenchmarkPrevAll(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("li:nth-child(4)")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.PrevAll().Length()
+ } else {
+ sel.PrevAll()
+ }
+ }
+ if n != 78 {
+ b.Fatalf("want 78, got %d", n)
+ }
+}
+
+func BenchmarkPrevAllFiltered(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("li:nth-child(4)")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.PrevAllFiltered("[class]").Length()
+ } else {
+ sel.PrevAllFiltered("[class]")
+ }
+ }
+ if n != 6 {
+ b.Fatalf("want 6, got %d", n)
+ }
+}
+
+func BenchmarkNextUntil(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("li:first-child")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.NextUntil(":nth-child(4)").Length()
+ } else {
+ sel.NextUntil(":nth-child(4)")
+ }
+ }
+ if n != 84 {
+ b.Fatalf("want 84, got %d", n)
+ }
+}
+
+func BenchmarkNextUntilSelection(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("h2")
+ sel2 := DocW().Find("ul")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.NextUntilSelection(sel2).Length()
+ } else {
+ sel.NextUntilSelection(sel2)
+ }
+ }
+ if n != 42 {
+ b.Fatalf("want 42, got %d", n)
+ }
+}
+
+func BenchmarkNextUntilNodes(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("h2")
+ sel2 := DocW().Find("p")
+ nodes := sel2.Nodes
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.NextUntilNodes(nodes...).Length()
+ } else {
+ sel.NextUntilNodes(nodes...)
+ }
+ }
+ if n != 12 {
+ b.Fatalf("want 12, got %d", n)
+ }
+}
+
+func BenchmarkPrevUntil(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("li:last-child")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.PrevUntil(":nth-child(4)").Length()
+ } else {
+ sel.PrevUntil(":nth-child(4)")
+ }
+ }
+ if n != 238 {
+ b.Fatalf("want 238, got %d", n)
+ }
+}
+
+func BenchmarkPrevUntilSelection(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("h2")
+ sel2 := DocW().Find("ul")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.PrevUntilSelection(sel2).Length()
+ } else {
+ sel.PrevUntilSelection(sel2)
+ }
+ }
+ if n != 49 {
+ b.Fatalf("want 49, got %d", n)
+ }
+}
+
+func BenchmarkPrevUntilNodes(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("h2")
+ sel2 := DocW().Find("p")
+ nodes := sel2.Nodes
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.PrevUntilNodes(nodes...).Length()
+ } else {
+ sel.PrevUntilNodes(nodes...)
+ }
+ }
+ if n != 11 {
+ b.Fatalf("want 11, got %d", n)
+ }
+}
+
+func BenchmarkNextFilteredUntil(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("h2")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.NextFilteredUntil("p", "div").Length()
+ } else {
+ sel.NextFilteredUntil("p", "div")
+ }
+ }
+ if n != 22 {
+ b.Fatalf("want 22, got %d", n)
+ }
+}
+
+func BenchmarkNextFilteredUntilSelection(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("h2")
+ sel2 := DocW().Find("div")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.NextFilteredUntilSelection("p", sel2).Length()
+ } else {
+ sel.NextFilteredUntilSelection("p", sel2)
+ }
+ }
+ if n != 22 {
+ b.Fatalf("want 22, got %d", n)
+ }
+}
+
+func BenchmarkNextFilteredUntilNodes(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("h2")
+ sel2 := DocW().Find("div")
+ nodes := sel2.Nodes
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.NextFilteredUntilNodes("p", nodes...).Length()
+ } else {
+ sel.NextFilteredUntilNodes("p", nodes...)
+ }
+ }
+ if n != 22 {
+ b.Fatalf("want 22, got %d", n)
+ }
+}
+
+func BenchmarkPrevFilteredUntil(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("h2")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.PrevFilteredUntil("p", "div").Length()
+ } else {
+ sel.PrevFilteredUntil("p", "div")
+ }
+ }
+ if n != 20 {
+ b.Fatalf("want 20, got %d", n)
+ }
+}
+
+func BenchmarkPrevFilteredUntilSelection(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("h2")
+ sel2 := DocW().Find("div")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.PrevFilteredUntilSelection("p", sel2).Length()
+ } else {
+ sel.PrevFilteredUntilSelection("p", sel2)
+ }
+ }
+ if n != 20 {
+ b.Fatalf("want 20, got %d", n)
+ }
+}
+
+func BenchmarkPrevFilteredUntilNodes(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := DocW().Find("h2")
+ sel2 := DocW().Find("div")
+ nodes := sel2.Nodes
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.PrevFilteredUntilNodes("p", nodes...).Length()
+ } else {
+ sel.PrevFilteredUntilNodes("p", nodes...)
+ }
+ }
+ if n != 20 {
+ b.Fatalf("want 20, got %d", n)
+ }
+}
+
+func BenchmarkClosest(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := Doc().Find(".container-fluid")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.Closest(".pvk-content").Length()
+ } else {
+ sel.Closest(".pvk-content")
+ }
+ }
+ if n != 2 {
+ b.Fatalf("want 2, got %d", n)
+ }
+}
+
+func BenchmarkClosestSelection(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := Doc().Find(".container-fluid")
+ sel2 := Doc().Find(".pvk-content")
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.ClosestSelection(sel2).Length()
+ } else {
+ sel.ClosestSelection(sel2)
+ }
+ }
+ if n != 2 {
+ b.Fatalf("want 2, got %d", n)
+ }
+}
+
+func BenchmarkClosestNodes(b *testing.B) {
+ var n int
+
+ b.StopTimer()
+ sel := Doc().Find(".container-fluid")
+ nodes := Doc().Find(".pvk-content").Nodes
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if n == 0 {
+ n = sel.ClosestNodes(nodes...).Length()
+ } else {
+ sel.ClosestNodes(nodes...)
+ }
+ }
+ if n != 2 {
+ b.Fatalf("want 2, got %d", n)
+ }
+}
diff --git a/vendor/github.com/PuerkitoBio/goquery/doc.go b/vendor/github.com/PuerkitoBio/goquery/doc.go
new file mode 100644
index 0000000..71146a7
--- /dev/null
+++ b/vendor/github.com/PuerkitoBio/goquery/doc.go
@@ -0,0 +1,123 @@
+// Copyright (c) 2012-2016, Martin Angers & Contributors
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without modification,
+// are permitted provided that the following conditions are met:
+//
+// * Redistributions of source code must retain the above copyright notice,
+// this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above copyright notice,
+// this list of conditions and the following disclaimer in the documentation and/or
+// other materials provided with the distribution.
+// * Neither the name of the author nor the names of its contributors may be used to
+// endorse or promote products derived from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS
+// OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
+// AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
+// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY
+// WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+/*
+Package goquery implements features similar to jQuery, including the chainable
+syntax, to manipulate and query an HTML document.
+
+It brings a syntax and a set of features similar to jQuery to the Go language.
+It is based on Go's net/html package and the CSS Selector library cascadia.
+Since the net/html parser returns nodes, and not a full-featured DOM
+tree, jQuery's stateful manipulation functions (like height(), css(), detach())
+have been left off.
+
+Also, because the net/html parser requires UTF-8 encoding, so does goquery: it is
+the caller's responsibility to ensure that the source document provides UTF-8 encoded HTML.
+See the repository's wiki for various options on how to do this.
+
+Syntax-wise, it is as close as possible to jQuery, with the same method names when
+possible, and that warm and fuzzy chainable interface. jQuery being the
+ultra-popular library that it is, writing a similar HTML-manipulating
+library was better to follow its API than to start anew (in the same spirit as
+Go's fmt package), even though some of its methods are less than intuitive (looking
+at you, index()...).
+
+It is hosted on GitHub, along with additional documentation in the README.md
+file: https://github.com/puerkitobio/goquery
+
+Please note that because of the net/html dependency, goquery requires Go1.1+.
+
+The various methods are split into files based on the category of behavior.
+The three dots (...) indicate that various "overloads" are available.
+
+* array.go : array-like positional manipulation of the selection.
+ - Eq()
+ - First()
+ - Get()
+ - Index...()
+ - Last()
+ - Slice()
+
+* expand.go : methods that expand or augment the selection's set.
+ - Add...()
+ - AndSelf()
+ - Union(), which is an alias for AddSelection()
+
+* filter.go : filtering methods, that reduce the selection's set.
+ - End()
+ - Filter...()
+ - Has...()
+ - Intersection(), which is an alias of FilterSelection()
+ - Not...()
+
+* iteration.go : methods to loop over the selection's nodes.
+ - Each()
+ - EachWithBreak()
+ - Map()
+
+* manipulation.go : methods for modifying the document
+ - After...()
+ - Append...()
+ - Before...()
+ - Clone()
+ - Empty()
+ - Prepend...()
+ - Remove...()
+ - ReplaceWith...()
+ - Unwrap()
+ - Wrap...()
+ - WrapAll...()
+ - WrapInner...()
+
+* property.go : methods that inspect and get the node's properties values.
+ - Attr*(), RemoveAttr(), SetAttr()
+ - AddClass(), HasClass(), RemoveClass(), ToggleClass()
+ - Html()
+ - Length()
+ - Size(), which is an alias for Length()
+ - Text()
+
+* query.go : methods that query, or reflect, a node's identity.
+ - Contains()
+ - Is...()
+
+* traversal.go : methods to traverse the HTML document tree.
+ - Children...()
+ - Contents()
+ - Find...()
+ - Next...()
+ - Parent[s]...()
+ - Prev...()
+ - Siblings...()
+
+* type.go : definition of the types exposed by goquery.
+ - Document
+ - Selection
+ - Matcher
+
+* utilities.go : definition of helper functions (and not methods on a *Selection)
+that are not part of jQuery, but are useful to goquery.
+ - NodeName
+ - OuterHtml
+*/
+package goquery
diff --git a/vendor/github.com/PuerkitoBio/goquery/example_test.go b/vendor/github.com/PuerkitoBio/goquery/example_test.go
new file mode 100644
index 0000000..bc97f05
--- /dev/null
+++ b/vendor/github.com/PuerkitoBio/goquery/example_test.go
@@ -0,0 +1,82 @@
+package goquery_test
+
+import (
+ "fmt"
+ "log"
+ "net/http"
+ "os"
+ "strings"
+
+ "github.com/PuerkitoBio/goquery"
+)
+
+// This example scrapes the reviews shown on the home page of metalsucks.net.
+func Example() {
+ // Request the HTML page.
+ res, err := http.Get("http://metalsucks.net")
+ if err != nil {
+ log.Fatal(err)
+ }
+ defer res.Body.Close()
+ if res.StatusCode != 200 {
+ log.Fatalf("status code error: %d %s", res.StatusCode, res.Status)
+ }
+
+ // Load the HTML document
+ doc, err := goquery.NewDocumentFromReader(res.Body)
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ // Find the review items
+ doc.Find(".sidebar-reviews article .content-block").Each(func(i int, s *goquery.Selection) {
+ // For each item found, get the band and title
+ band := s.Find("a").Text()
+ title := s.Find("i").Text()
+ fmt.Printf("Review %d: %s - %s\n", i, band, title)
+ })
+ // To see the output of the Example while running the test suite (go test), simply
+ // remove the leading "x" before Output on the next line. This will cause the
+ // example to fail (all the "real" tests should pass).
+
+ // xOutput: voluntarily fail the Example output.
+}
+
+// This example shows how to use NewDocumentFromReader from a file.
+func ExampleNewDocumentFromReader_file() {
+ // create from a file
+ f, err := os.Open("some/file.html")
+ if err != nil {
+ log.Fatal(err)
+ }
+ defer f.Close()
+ doc, err := goquery.NewDocumentFromReader(f)
+ if err != nil {
+ log.Fatal(err)
+ }
+ // use the goquery document...
+ _ = doc.Find("h1")
+}
+
+// This example shows how to use NewDocumentFromReader from a string.
+func ExampleNewDocumentFromReader_string() {
+ // create from a string
+	data := `
+<html>
+	<head>
+		<title>My document</title>
+	</head>
+	<body>
+		<h1>Header</h1>
+	</body>
+</html>`
+
+ doc, err := goquery.NewDocumentFromReader(strings.NewReader(data))
+ if err != nil {
+ log.Fatal(err)
+ }
+ header := doc.Find("h1").Text()
+ fmt.Println(header)
+
+ // Output: Header
+}
diff --git a/vendor/github.com/PuerkitoBio/goquery/expand.go b/vendor/github.com/PuerkitoBio/goquery/expand.go
new file mode 100644
index 0000000..7caade5
--- /dev/null
+++ b/vendor/github.com/PuerkitoBio/goquery/expand.go
@@ -0,0 +1,70 @@
+package goquery
+
+import "golang.org/x/net/html"
+
+// Add adds the selector string's matching nodes to those in the current
+// selection and returns a new Selection object.
+// The selector string is run in the context of the document of the current
+// Selection object.
+func (s *Selection) Add(selector string) *Selection {
+ return s.AddNodes(findWithMatcher([]*html.Node{s.document.rootNode}, compileMatcher(selector))...)
+}
+
+// AddMatcher adds the matcher's matching nodes to those in the current
+// selection and returns a new Selection object.
+// The matcher is run in the context of the document of the current
+// Selection object.
+func (s *Selection) AddMatcher(m Matcher) *Selection {
+ return s.AddNodes(findWithMatcher([]*html.Node{s.document.rootNode}, m)...)
+}
+
+// AddSelection adds the specified Selection object's nodes to those in the
+// current selection and returns a new Selection object.
+func (s *Selection) AddSelection(sel *Selection) *Selection {
+ if sel == nil {
+ return s.AddNodes()
+ }
+ return s.AddNodes(sel.Nodes...)
+}
+
+// Union is an alias for AddSelection.
+func (s *Selection) Union(sel *Selection) *Selection {
+ return s.AddSelection(sel)
+}
+
+// AddNodes adds the specified nodes to those in the
+// current selection and returns a new Selection object.
+func (s *Selection) AddNodes(nodes ...*html.Node) *Selection {
+ return pushStack(s, appendWithoutDuplicates(s.Nodes, nodes, nil))
+}
+
+// AndSelf adds the previous set of elements on the stack to the current set.
+// It returns a new Selection object containing the current Selection combined
+// with the previous one.
+// Deprecated: This function has been deprecated and is now an alias for AddBack().
+func (s *Selection) AndSelf() *Selection {
+ return s.AddBack()
+}
+
+// AddBack adds the previous set of elements on the stack to the current set.
+// It returns a new Selection object containing the current Selection combined
+// with the previous one.
+func (s *Selection) AddBack() *Selection {
+ return s.AddSelection(s.prevSel)
+}
+
+// AddBackFiltered reduces the previous set of elements on the stack to those that
+// match the selector string, and adds them to the current set.
+// It returns a new Selection object containing the current Selection combined
+// with the filtered previous one
+func (s *Selection) AddBackFiltered(selector string) *Selection {
+ return s.AddSelection(s.prevSel.Filter(selector))
+}
+
+// AddBackMatcher reduces the previous set of elements on the stack to those that match
+// the matcher, and adds them to the current set.
+// It returns a new Selection object containing the current Selection combined
+// with the filtered previous one
+func (s *Selection) AddBackMatcher(m Matcher) *Selection {
+ return s.AddSelection(s.prevSel.FilterMatcher(m))
+}
diff --git a/vendor/github.com/PuerkitoBio/goquery/expand_test.go b/vendor/github.com/PuerkitoBio/goquery/expand_test.go
new file mode 100644
index 0000000..c034dc6
--- /dev/null
+++ b/vendor/github.com/PuerkitoBio/goquery/expand_test.go
@@ -0,0 +1,118 @@
+package goquery
+
+import (
+ "testing"
+)
+
+func TestAdd(t *testing.T) {
+ sel := Doc().Find("div.row-fluid").Add("a")
+ assertLength(t, sel.Nodes, 19)
+}
+
+func TestAddInvalid(t *testing.T) {
+ sel1 := Doc().Find("div.row-fluid")
+ sel2 := sel1.Add("")
+ assertLength(t, sel1.Nodes, 9)
+ assertLength(t, sel2.Nodes, 9)
+ if sel1 == sel2 {
+ t.Errorf("selections should not be the same")
+ }
+}
+
+func TestAddRollback(t *testing.T) {
+ sel := Doc().Find(".pvk-content")
+ sel2 := sel.Add("a").End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestAddSelection(t *testing.T) {
+ sel := Doc().Find("div.row-fluid")
+ sel2 := Doc().Find("a")
+ sel = sel.AddSelection(sel2)
+ assertLength(t, sel.Nodes, 19)
+}
+
+func TestAddSelectionNil(t *testing.T) {
+ sel := Doc().Find("div.row-fluid")
+ assertLength(t, sel.Nodes, 9)
+
+ sel = sel.AddSelection(nil)
+ assertLength(t, sel.Nodes, 9)
+}
+
+func TestAddSelectionRollback(t *testing.T) {
+ sel := Doc().Find(".pvk-content")
+ sel2 := sel.Find("a")
+ sel2 = sel.AddSelection(sel2).End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestAddNodes(t *testing.T) {
+ sel := Doc().Find("div.pvk-gutter")
+ sel2 := Doc().Find(".pvk-content")
+ sel = sel.AddNodes(sel2.Nodes...)
+ assertLength(t, sel.Nodes, 9)
+}
+
+func TestAddNodesNone(t *testing.T) {
+ sel := Doc().Find("div.pvk-gutter").AddNodes()
+ assertLength(t, sel.Nodes, 6)
+}
+
+func TestAddNodesRollback(t *testing.T) {
+ sel := Doc().Find(".pvk-content")
+ sel2 := sel.Find("a")
+ sel2 = sel.AddNodes(sel2.Nodes...).End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestAddNodesBig(t *testing.T) {
+ doc := DocW()
+ sel := doc.Find("li")
+ assertLength(t, sel.Nodes, 373)
+ sel2 := doc.Find("xyz")
+ assertLength(t, sel2.Nodes, 0)
+
+ nodes := sel.Nodes
+ sel2 = sel2.AddNodes(nodes...)
+ assertLength(t, sel2.Nodes, 373)
+ nodes2 := append(nodes, nodes...)
+ sel2 = sel2.End().AddNodes(nodes2...)
+ assertLength(t, sel2.Nodes, 373)
+ nodes3 := append(nodes2, nodes...)
+ sel2 = sel2.End().AddNodes(nodes3...)
+ assertLength(t, sel2.Nodes, 373)
+}
+
+func TestAndSelf(t *testing.T) {
+ sel := Doc().Find(".span12").Last().AndSelf()
+ assertLength(t, sel.Nodes, 2)
+}
+
+func TestAndSelfRollback(t *testing.T) {
+ sel := Doc().Find(".pvk-content")
+ sel2 := sel.Find("a").AndSelf().End().End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestAddBack(t *testing.T) {
+ sel := Doc().Find(".span12").Last().AddBack()
+ assertLength(t, sel.Nodes, 2)
+}
+
+func TestAddBackRollback(t *testing.T) {
+ sel := Doc().Find(".pvk-content")
+ sel2 := sel.Find("a").AddBack().End().End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestAddBackFiltered(t *testing.T) {
+ sel := Doc().Find(".span12, .footer").Find("h1").AddBackFiltered(".footer")
+ assertLength(t, sel.Nodes, 2)
+}
+
+func TestAddBackFilteredRollback(t *testing.T) {
+ sel := Doc().Find(".span12, .footer")
+ sel2 := sel.Find("h1").AddBackFiltered(".footer").End().End()
+ assertEqual(t, sel, sel2)
+}
diff --git a/vendor/github.com/PuerkitoBio/goquery/filter.go b/vendor/github.com/PuerkitoBio/goquery/filter.go
new file mode 100644
index 0000000..9138ffb
--- /dev/null
+++ b/vendor/github.com/PuerkitoBio/goquery/filter.go
@@ -0,0 +1,163 @@
+package goquery
+
+import "golang.org/x/net/html"
+
+// Filter reduces the set of matched elements to those that match the selector string.
+// It returns a new Selection object for this subset of matching elements.
+func (s *Selection) Filter(selector string) *Selection {
+ return s.FilterMatcher(compileMatcher(selector))
+}
+
+// FilterMatcher reduces the set of matched elements to those that match
+// the given matcher. It returns a new Selection object for this subset
+// of matching elements.
+func (s *Selection) FilterMatcher(m Matcher) *Selection {
+ return pushStack(s, winnow(s, m, true))
+}
+
+// Not removes elements from the Selection that match the selector string.
+// It returns a new Selection object with the matching elements removed.
+func (s *Selection) Not(selector string) *Selection {
+ return s.NotMatcher(compileMatcher(selector))
+}
+
+// NotMatcher removes elements from the Selection that match the given matcher.
+// It returns a new Selection object with the matching elements removed.
+func (s *Selection) NotMatcher(m Matcher) *Selection {
+ return pushStack(s, winnow(s, m, false))
+}
+
+// FilterFunction reduces the set of matched elements to those that pass the function's test.
+// It returns a new Selection object for this subset of elements.
+func (s *Selection) FilterFunction(f func(int, *Selection) bool) *Selection {
+ return pushStack(s, winnowFunction(s, f, true))
+}
+
+// NotFunction removes elements from the Selection that pass the function's test.
+// It returns a new Selection object with the matching elements removed.
+func (s *Selection) NotFunction(f func(int, *Selection) bool) *Selection {
+ return pushStack(s, winnowFunction(s, f, false))
+}
+
+// FilterNodes reduces the set of matched elements to those that match the specified nodes.
+// It returns a new Selection object for this subset of elements.
+func (s *Selection) FilterNodes(nodes ...*html.Node) *Selection {
+ return pushStack(s, winnowNodes(s, nodes, true))
+}
+
+// NotNodes removes elements from the Selection that match the specified nodes.
+// It returns a new Selection object with the matching elements removed.
+func (s *Selection) NotNodes(nodes ...*html.Node) *Selection {
+ return pushStack(s, winnowNodes(s, nodes, false))
+}
+
+// FilterSelection reduces the set of matched elements to those that match a
+// node in the specified Selection object.
+// It returns a new Selection object for this subset of elements.
+func (s *Selection) FilterSelection(sel *Selection) *Selection {
+ if sel == nil {
+ return pushStack(s, winnowNodes(s, nil, true))
+ }
+ return pushStack(s, winnowNodes(s, sel.Nodes, true))
+}
+
+// NotSelection removes elements from the Selection that match a node in the specified
+// Selection object. It returns a new Selection object with the matching elements removed.
+func (s *Selection) NotSelection(sel *Selection) *Selection {
+ if sel == nil {
+ return pushStack(s, winnowNodes(s, nil, false))
+ }
+ return pushStack(s, winnowNodes(s, sel.Nodes, false))
+}
+
+// Intersection is an alias for FilterSelection.
+func (s *Selection) Intersection(sel *Selection) *Selection {
+ return s.FilterSelection(sel)
+}
+
+// Has reduces the set of matched elements to those that have a descendant
+// that matches the selector.
+// It returns a new Selection object with the matching elements.
+func (s *Selection) Has(selector string) *Selection {
+ return s.HasSelection(s.document.Find(selector))
+}
+
+// HasMatcher reduces the set of matched elements to those that have a descendant
+// that matches the matcher.
+// It returns a new Selection object with the matching elements.
+func (s *Selection) HasMatcher(m Matcher) *Selection {
+ return s.HasSelection(s.document.FindMatcher(m))
+}
+
+// HasNodes reduces the set of matched elements to those that have a
+// descendant that matches one of the nodes.
+// It returns a new Selection object with the matching elements.
+func (s *Selection) HasNodes(nodes ...*html.Node) *Selection {
+ return s.FilterFunction(func(_ int, sel *Selection) bool {
+ // Add all nodes that contain one of the specified nodes
+ for _, n := range nodes {
+ if sel.Contains(n) {
+ return true
+ }
+ }
+ return false
+ })
+}
+
+// HasSelection reduces the set of matched elements to those that have a
+// descendant that matches one of the nodes of the specified Selection object.
+// It returns a new Selection object with the matching elements.
+func (s *Selection) HasSelection(sel *Selection) *Selection {
+ if sel == nil {
+ return s.HasNodes()
+ }
+ return s.HasNodes(sel.Nodes...)
+}
+
+// End ends the most recent filtering operation in the current chain and
+// returns the set of matched elements to its previous state.
+func (s *Selection) End() *Selection {
+ if s.prevSel != nil {
+ return s.prevSel
+ }
+ return newEmptySelection(s.document)
+}
+
+// Filter based on the matcher, and the indicator to keep (Filter) or
+// to get rid of (Not) the matching elements.
+func winnow(sel *Selection, m Matcher, keep bool) []*html.Node {
+ // Optimize if keep is requested
+ if keep {
+ return m.Filter(sel.Nodes)
+ }
+ // Use grep
+ return grep(sel, func(i int, s *Selection) bool {
+ return !m.Match(s.Get(0))
+ })
+}
+
+// Filter based on an array of nodes, and the indicator to keep (Filter) or
+// to get rid of (Not) the matching elements.
+func winnowNodes(sel *Selection, nodes []*html.Node, keep bool) []*html.Node {
+ if len(nodes)+len(sel.Nodes) < minNodesForSet {
+ return grep(sel, func(i int, s *Selection) bool {
+ return isInSlice(nodes, s.Get(0)) == keep
+ })
+ }
+
+ set := make(map[*html.Node]bool)
+ for _, n := range nodes {
+ set[n] = true
+ }
+ return grep(sel, func(i int, s *Selection) bool {
+ return set[s.Get(0)] == keep
+ })
+}
+
+// Filter based on a function test, and the indicator to keep (Filter) or
+// to get rid of (Not) the matching elements.
+func winnowFunction(sel *Selection, f func(int, *Selection) bool, keep bool) []*html.Node {
+ return grep(sel, func(i int, s *Selection) bool {
+ return f(i, s) == keep
+ })
+}
diff --git a/vendor/github.com/PuerkitoBio/goquery/filter_test.go b/vendor/github.com/PuerkitoBio/goquery/filter_test.go
new file mode 100644
index 0000000..f663c08
--- /dev/null
+++ b/vendor/github.com/PuerkitoBio/goquery/filter_test.go
@@ -0,0 +1,206 @@
+package goquery
+
+import (
+ "testing"
+)
+
+func TestFilter(t *testing.T) {
+ sel := Doc().Find(".span12").Filter(".alert")
+ assertLength(t, sel.Nodes, 1)
+}
+
+func TestFilterNone(t *testing.T) {
+ sel := Doc().Find(".span12").Filter(".zzalert")
+ assertLength(t, sel.Nodes, 0)
+}
+
+func TestFilterInvalid(t *testing.T) {
+ sel := Doc().Find(".span12").Filter("")
+ assertLength(t, sel.Nodes, 0)
+}
+
+func TestFilterRollback(t *testing.T) {
+ sel := Doc().Find(".pvk-content")
+ sel2 := sel.Filter(".alert").End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestFilterFunction(t *testing.T) {
+ sel := Doc().Find(".pvk-content").FilterFunction(func(i int, s *Selection) bool {
+ return i > 0
+ })
+ assertLength(t, sel.Nodes, 2)
+}
+
+func TestFilterFunctionRollback(t *testing.T) {
+ sel := Doc().Find(".pvk-content")
+ sel2 := sel.FilterFunction(func(i int, s *Selection) bool {
+ return i > 0
+ }).End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestFilterNode(t *testing.T) {
+ sel := Doc().Find(".pvk-content")
+ sel2 := sel.FilterNodes(sel.Nodes[2])
+ assertLength(t, sel2.Nodes, 1)
+}
+
+func TestFilterNodeRollback(t *testing.T) {
+ sel := Doc().Find(".pvk-content")
+ sel2 := sel.FilterNodes(sel.Nodes[2]).End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestFilterSelection(t *testing.T) {
+ sel := Doc().Find(".link")
+ sel2 := Doc().Find("a[ng-click]")
+ sel3 := sel.FilterSelection(sel2)
+ assertLength(t, sel3.Nodes, 1)
+}
+
+func TestFilterSelectionRollback(t *testing.T) {
+ sel := Doc().Find(".link")
+ sel2 := Doc().Find("a[ng-click]")
+ sel2 = sel.FilterSelection(sel2).End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestFilterSelectionNil(t *testing.T) {
+ var sel2 *Selection
+
+ sel := Doc().Find(".link")
+ sel3 := sel.FilterSelection(sel2)
+ assertLength(t, sel3.Nodes, 0)
+}
+
+func TestNot(t *testing.T) {
+ sel := Doc().Find(".span12").Not(".alert")
+ assertLength(t, sel.Nodes, 1)
+}
+
+func TestNotInvalid(t *testing.T) {
+ sel := Doc().Find(".span12").Not("")
+ assertLength(t, sel.Nodes, 2)
+}
+
+func TestNotRollback(t *testing.T) {
+ sel := Doc().Find(".span12")
+ sel2 := sel.Not(".alert").End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestNotNone(t *testing.T) {
+ sel := Doc().Find(".span12").Not(".zzalert")
+ assertLength(t, sel.Nodes, 2)
+}
+
+func TestNotFunction(t *testing.T) {
+ sel := Doc().Find(".pvk-content").NotFunction(func(i int, s *Selection) bool {
+ return i > 0
+ })
+ assertLength(t, sel.Nodes, 1)
+}
+
+func TestNotFunctionRollback(t *testing.T) {
+ sel := Doc().Find(".pvk-content")
+ sel2 := sel.NotFunction(func(i int, s *Selection) bool {
+ return i > 0
+ }).End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestNotNode(t *testing.T) {
+ sel := Doc().Find(".pvk-content")
+ sel2 := sel.NotNodes(sel.Nodes[2])
+ assertLength(t, sel2.Nodes, 2)
+}
+
+func TestNotNodeRollback(t *testing.T) {
+ sel := Doc().Find(".pvk-content")
+ sel2 := sel.NotNodes(sel.Nodes[2]).End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestNotSelection(t *testing.T) {
+ sel := Doc().Find(".link")
+ sel2 := Doc().Find("a[ng-click]")
+ sel3 := sel.NotSelection(sel2)
+ assertLength(t, sel3.Nodes, 6)
+}
+
+func TestNotSelectionRollback(t *testing.T) {
+ sel := Doc().Find(".link")
+ sel2 := Doc().Find("a[ng-click]")
+ sel2 = sel.NotSelection(sel2).End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestIntersection(t *testing.T) {
+ sel := Doc().Find(".pvk-gutter")
+ sel2 := Doc().Find("div").Intersection(sel)
+ assertLength(t, sel2.Nodes, 6)
+}
+
+func TestIntersectionRollback(t *testing.T) {
+ sel := Doc().Find(".pvk-gutter")
+ sel2 := Doc().Find("div")
+ sel2 = sel.Intersection(sel2).End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestHas(t *testing.T) {
+ sel := Doc().Find(".container-fluid").Has(".center-content")
+ assertLength(t, sel.Nodes, 2)
+ // Has() returns the high-level .container-fluid div, and the one that is the immediate parent of center-content
+}
+
+func TestHasInvalid(t *testing.T) {
+ sel := Doc().Find(".container-fluid").Has("")
+ assertLength(t, sel.Nodes, 0)
+}
+
+func TestHasRollback(t *testing.T) {
+ sel := Doc().Find(".container-fluid")
+ sel2 := sel.Has(".center-content").End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestHasNodes(t *testing.T) {
+ sel := Doc().Find(".container-fluid")
+ sel2 := Doc().Find(".center-content")
+ sel = sel.HasNodes(sel2.Nodes...)
+ assertLength(t, sel.Nodes, 2)
+ // Has() returns the high-level .container-fluid div, and the one that is the immediate parent of center-content
+}
+
+func TestHasNodesRollback(t *testing.T) {
+ sel := Doc().Find(".container-fluid")
+ sel2 := Doc().Find(".center-content")
+ sel2 = sel.HasNodes(sel2.Nodes...).End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestHasSelection(t *testing.T) {
+ sel := Doc().Find("p")
+ sel2 := Doc().Find("small")
+ sel = sel.HasSelection(sel2)
+ assertLength(t, sel.Nodes, 1)
+}
+
+func TestHasSelectionRollback(t *testing.T) {
+ sel := Doc().Find("p")
+ sel2 := Doc().Find("small")
+ sel2 = sel.HasSelection(sel2).End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestEnd(t *testing.T) {
+ sel := Doc().Find("p").Has("small").End()
+ assertLength(t, sel.Nodes, 4)
+}
+
+func TestEndToTop(t *testing.T) {
+ sel := Doc().Find("p").Has("small").End().End().End()
+ assertLength(t, sel.Nodes, 0)
+}
diff --git a/vendor/github.com/PuerkitoBio/goquery/go.mod b/vendor/github.com/PuerkitoBio/goquery/go.mod
new file mode 100644
index 0000000..2fa1332
--- /dev/null
+++ b/vendor/github.com/PuerkitoBio/goquery/go.mod
@@ -0,0 +1,6 @@
+module github.com/PuerkitoBio/goquery
+
+require (
+ github.com/andybalholm/cascadia v1.0.0
+ golang.org/x/net v0.0.0-20181114220301-adae6a3d119a
+)
diff --git a/vendor/github.com/PuerkitoBio/goquery/go.sum b/vendor/github.com/PuerkitoBio/goquery/go.sum
new file mode 100644
index 0000000..11c5757
--- /dev/null
+++ b/vendor/github.com/PuerkitoBio/goquery/go.sum
@@ -0,0 +1,5 @@
+github.com/andybalholm/cascadia v1.0.0 h1:hOCXnnZ5A+3eVDX8pvgl4kofXv2ELss0bKcqRySc45o=
+github.com/andybalholm/cascadia v1.0.0/go.mod h1:GsXiBklL0woXo1j/WYWtSYYC4ouU9PqHO0sqidkEA4Y=
+golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20181114220301-adae6a3d119a h1:gOpx8G595UYyvj8UK4+OFyY4rx037g3fmfhe5SasG3U=
+golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
diff --git a/vendor/github.com/PuerkitoBio/goquery/iteration.go b/vendor/github.com/PuerkitoBio/goquery/iteration.go
new file mode 100644
index 0000000..e246f2e
--- /dev/null
+++ b/vendor/github.com/PuerkitoBio/goquery/iteration.go
@@ -0,0 +1,39 @@
+package goquery
+
+// Each iterates over a Selection object, executing a function for each
+// matched element. It returns the current Selection object. The function
+// f is called for each element in the selection with the index of the
+// element in that selection starting at 0, and a *Selection that contains
+// only that element.
+func (s *Selection) Each(f func(int, *Selection)) *Selection {
+ for i, n := range s.Nodes {
+ f(i, newSingleSelection(n, s.document))
+ }
+ return s
+}
+
+// EachWithBreak iterates over a Selection object, executing a function for each
+// matched element. It is identical to Each except that it is possible to break
+// out of the loop by returning false in the callback function. It returns the
+// current Selection object.
+func (s *Selection) EachWithBreak(f func(int, *Selection) bool) *Selection {
+ for i, n := range s.Nodes {
+ if !f(i, newSingleSelection(n, s.document)) {
+ return s
+ }
+ }
+ return s
+}
+
+// Map passes each element in the current matched set through a function,
+// producing a slice of string holding the returned values. The function
+// f is called for each element in the selection with the index of the
+// element in that selection starting at 0, and a *Selection that contains
+// only that element.
+func (s *Selection) Map(f func(int, *Selection) string) (result []string) {
+ for i, n := range s.Nodes {
+ result = append(result, f(i, newSingleSelection(n, s.document)))
+ }
+
+ return result
+}
diff --git a/vendor/github.com/PuerkitoBio/goquery/iteration_test.go b/vendor/github.com/PuerkitoBio/goquery/iteration_test.go
new file mode 100644
index 0000000..9b6aafb
--- /dev/null
+++ b/vendor/github.com/PuerkitoBio/goquery/iteration_test.go
@@ -0,0 +1,88 @@
+package goquery
+
+import (
+ "testing"
+
+ "golang.org/x/net/html"
+)
+
+func TestEach(t *testing.T) {
+ var cnt int
+
+ sel := Doc().Find(".hero-unit .row-fluid").Each(func(i int, n *Selection) {
+ cnt++
+ t.Logf("At index %v, node %v", i, n.Nodes[0].Data)
+ }).Find("a")
+
+ if cnt != 4 {
+ t.Errorf("Expected Each() to call function 4 times, got %v times.", cnt)
+ }
+ assertLength(t, sel.Nodes, 6)
+}
+
+func TestEachWithBreak(t *testing.T) {
+ var cnt int
+
+ sel := Doc().Find(".hero-unit .row-fluid").EachWithBreak(func(i int, n *Selection) bool {
+ cnt++
+ t.Logf("At index %v, node %v", i, n.Nodes[0].Data)
+ return false
+ }).Find("a")
+
+ if cnt != 1 {
+ t.Errorf("Expected Each() to call function 1 time, got %v times.", cnt)
+ }
+ assertLength(t, sel.Nodes, 6)
+}
+
+func TestEachEmptySelection(t *testing.T) {
+ var cnt int
+
+ sel := Doc().Find("zzzz")
+ sel.Each(func(i int, n *Selection) {
+ cnt++
+ })
+ if cnt > 0 {
+ t.Error("Expected Each() to not be called on empty Selection.")
+ }
+ sel2 := sel.Find("div")
+ assertLength(t, sel2.Nodes, 0)
+}
+
+func TestMap(t *testing.T) {
+ sel := Doc().Find(".pvk-content")
+ vals := sel.Map(func(i int, s *Selection) string {
+ n := s.Get(0)
+ if n.Type == html.ElementNode {
+ return n.Data
+ }
+ return ""
+ })
+ for _, v := range vals {
+ if v != "div" {
+ t.Error("Expected Map array result to be all 'div's.")
+ }
+ }
+ if len(vals) != 3 {
+ t.Errorf("Expected Map array result to have a length of 3, found %v.", len(vals))
+ }
+}
+
+func TestForRange(t *testing.T) {
+ sel := Doc().Find(".pvk-content")
+ initLen := sel.Length()
+ for i := range sel.Nodes {
+ single := sel.Eq(i)
+ //h, err := single.Html()
+ //if err != nil {
+ // t.Fatal(err)
+ //}
+ //fmt.Println(i, h)
+ if single.Length() != 1 {
+ t.Errorf("%d: expected length of 1, got %d", i, single.Length())
+ }
+ }
+ if sel.Length() != initLen {
+ t.Errorf("expected initial selection to still have length %d, got %d", initLen, sel.Length())
+ }
+}
diff --git a/vendor/github.com/PuerkitoBio/goquery/manipulation.go b/vendor/github.com/PuerkitoBio/goquery/manipulation.go
new file mode 100644
index 0000000..34eb757
--- /dev/null
+++ b/vendor/github.com/PuerkitoBio/goquery/manipulation.go
@@ -0,0 +1,574 @@
+package goquery
+
+import (
+ "strings"
+
+ "golang.org/x/net/html"
+)
+
+// After applies the selector from the root document and inserts the matched elements
+// after the elements in the set of matched elements.
+//
+// If one of the matched elements in the selection is not currently in the
+// document, it's impossible to insert nodes after it, so it will be ignored.
+//
+// This follows the same rules as Selection.Append.
+func (s *Selection) After(selector string) *Selection {
+ return s.AfterMatcher(compileMatcher(selector))
+}
+
+// AfterMatcher applies the matcher from the root document and inserts the matched elements
+// after the elements in the set of matched elements.
+//
+// If one of the matched elements in the selection is not currently in the
+// document, it's impossible to insert nodes after it, so it will be ignored.
+//
+// This follows the same rules as Selection.Append.
+func (s *Selection) AfterMatcher(m Matcher) *Selection {
+ return s.AfterNodes(m.MatchAll(s.document.rootNode)...)
+}
+
+// AfterSelection inserts the elements in the selection after each element in the set of matched
+// elements.
+//
+// This follows the same rules as Selection.Append.
+func (s *Selection) AfterSelection(sel *Selection) *Selection {
+ return s.AfterNodes(sel.Nodes...)
+}
+
+// AfterHtml parses the html and inserts it after the set of matched elements.
+//
+// This follows the same rules as Selection.Append.
+func (s *Selection) AfterHtml(html string) *Selection {
+ return s.AfterNodes(parseHtml(html)...)
+}
+
+// AfterNodes inserts the nodes after each element in the set of matched elements.
+//
+// This follows the same rules as Selection.Append.
+func (s *Selection) AfterNodes(ns ...*html.Node) *Selection {
+ // reverse=true keeps the inserted nodes in source order: each one is
+ // inserted immediately after the target (before its current NextSibling).
+ return s.manipulateNodes(ns, true, func(sn *html.Node, n *html.Node) {
+ // A detached target (no parent) cannot receive siblings; skip it.
+ if sn.Parent != nil {
+ sn.Parent.InsertBefore(n, sn.NextSibling)
+ }
+ })
+}
+
+// Append appends the elements specified by the selector to the end of each element
+// in the set of matched elements, following those rules:
+//
+// 1) The selector is applied to the root document.
+//
+// 2) Elements that are part of the document will be moved to the new location.
+//
+// 3) If there are multiple locations to append to, cloned nodes will be
+// appended to all target locations except the last one, which will be moved
+// as noted in (2).
+func (s *Selection) Append(selector string) *Selection {
+ return s.AppendMatcher(compileMatcher(selector))
+}
+
+// AppendMatcher appends the elements specified by the matcher to the end of each element
+// in the set of matched elements.
+//
+// This follows the same rules as Selection.Append.
+func (s *Selection) AppendMatcher(m Matcher) *Selection {
+ return s.AppendNodes(m.MatchAll(s.document.rootNode)...)
+}
+
+// AppendSelection appends the elements in the selection to the end of each element
+// in the set of matched elements.
+//
+// This follows the same rules as Selection.Append.
+func (s *Selection) AppendSelection(sel *Selection) *Selection {
+ return s.AppendNodes(sel.Nodes...)
+}
+
+// AppendHtml parses the html and appends it to the set of matched elements.
+func (s *Selection) AppendHtml(html string) *Selection {
+ return s.AppendNodes(parseHtml(html)...)
+}
+
+// AppendNodes appends the specified nodes to each node in the set of matched elements.
+//
+// This follows the same rules as Selection.Append.
+func (s *Selection) AppendNodes(ns ...*html.Node) *Selection {
+ // manipulateNodes hands the callback parentless nodes (clones, or the
+ // originals detached from their parent), as AppendChild requires.
+ return s.manipulateNodes(ns, false, func(sn *html.Node, n *html.Node) {
+ sn.AppendChild(n)
+ })
+}
+
+// Before inserts the matched elements before each element in the set of matched elements.
+//
+// This follows the same rules as Selection.Append.
+func (s *Selection) Before(selector string) *Selection {
+ return s.BeforeMatcher(compileMatcher(selector))
+}
+
+// BeforeMatcher inserts the matched elements before each element in the set of matched elements.
+//
+// This follows the same rules as Selection.Append.
+func (s *Selection) BeforeMatcher(m Matcher) *Selection {
+ return s.BeforeNodes(m.MatchAll(s.document.rootNode)...)
+}
+
+// BeforeSelection inserts the elements in the selection before each element in the set of matched
+// elements.
+//
+// This follows the same rules as Selection.Append.
+func (s *Selection) BeforeSelection(sel *Selection) *Selection {
+ return s.BeforeNodes(sel.Nodes...)
+}
+
+// BeforeHtml parses the html and inserts it before the set of matched elements.
+//
+// This follows the same rules as Selection.Append.
+func (s *Selection) BeforeHtml(html string) *Selection {
+ return s.BeforeNodes(parseHtml(html)...)
+}
+
+// BeforeNodes inserts the nodes before each element in the set of matched elements.
+//
+// This follows the same rules as Selection.Append.
+func (s *Selection) BeforeNodes(ns ...*html.Node) *Selection {
+ // Inserting each node directly before the target already preserves source
+ // order, so no reverse pass is needed (unlike AfterNodes/PrependNodes).
+ return s.manipulateNodes(ns, false, func(sn *html.Node, n *html.Node) {
+ // A detached target (no parent) cannot receive siblings; skip it.
+ if sn.Parent != nil {
+ sn.Parent.InsertBefore(n, sn)
+ }
+ })
+}
+
+// Clone creates a deep copy of the set of matched nodes. The new nodes will not be
+// attached to the document.
+func (s *Selection) Clone() *Selection {
+ // Start from an empty selection on the same document, then fill it with
+ // deep copies so mutating the clones never touches the original tree.
+ ns := newEmptySelection(s.document)
+ ns.Nodes = cloneNodes(s.Nodes)
+ return ns
+}
+
+// Empty removes all children nodes from the set of matched elements.
+// It returns the children nodes in a new Selection.
+func (s *Selection) Empty() *Selection {
+ var removed []*html.Node
+
+ for _, parent := range s.Nodes {
+ // Detach children one at a time; each RemoveChild re-links FirstChild
+ // to the next remaining child.
+ for parent.FirstChild != nil {
+ child := parent.FirstChild
+ parent.RemoveChild(child)
+ removed = append(removed, child)
+ }
+ }
+
+ return pushStack(s, removed)
+}
+
+// Prepend prepends the elements specified by the selector to each element in
+// the set of matched elements, following the same rules as Append.
+func (s *Selection) Prepend(selector string) *Selection {
+ return s.PrependMatcher(compileMatcher(selector))
+}
+
+// PrependMatcher prepends the elements specified by the matcher to each
+// element in the set of matched elements.
+//
+// This follows the same rules as Selection.Append.
+func (s *Selection) PrependMatcher(m Matcher) *Selection {
+ return s.PrependNodes(m.MatchAll(s.document.rootNode)...)
+}
+
+// PrependSelection prepends the elements in the selection to each element in
+// the set of matched elements.
+//
+// This follows the same rules as Selection.Append.
+func (s *Selection) PrependSelection(sel *Selection) *Selection {
+ return s.PrependNodes(sel.Nodes...)
+}
+
+// PrependHtml parses the html and prepends it to the set of matched elements.
+func (s *Selection) PrependHtml(html string) *Selection {
+ return s.PrependNodes(parseHtml(html)...)
+}
+
+// PrependNodes prepends the specified nodes to each node in the set of
+// matched elements.
+//
+// This follows the same rules as Selection.Append.
+func (s *Selection) PrependNodes(ns ...*html.Node) *Selection {
+ // reverse=true keeps source order: each node is inserted at the front,
+ // so the nodes must be processed last-to-first.
+ return s.manipulateNodes(ns, true, func(sn *html.Node, n *html.Node) {
+ // sn.FirstChild may be nil, in which case this functions like
+ // sn.AppendChild()
+ sn.InsertBefore(n, sn.FirstChild)
+ })
+}
+
+// Remove removes the set of matched elements from the document.
+// It returns the same selection, now consisting of nodes not in the document.
+func (s *Selection) Remove() *Selection {
+ for _, n := range s.Nodes {
+ // Nodes that are already detached have no parent; skip them.
+ if n.Parent != nil {
+ n.Parent.RemoveChild(n)
+ }
+ }
+
+ return s
+}
+
+// RemoveFiltered removes the set of matched elements by selector.
+// It returns the Selection of removed nodes.
+func (s *Selection) RemoveFiltered(selector string) *Selection {
+ return s.RemoveMatcher(compileMatcher(selector))
+}
+
+// RemoveMatcher removes the set of matched elements.
+// It returns the Selection of removed nodes.
+func (s *Selection) RemoveMatcher(m Matcher) *Selection {
+ // Narrow the current selection with the matcher first, then detach.
+ return s.FilterMatcher(m).Remove()
+}
+
+// ReplaceWith replaces each element in the set of matched elements with the
+// nodes matched by the given selector.
+// It returns the removed elements.
+//
+// This follows the same rules as Selection.Append.
+func (s *Selection) ReplaceWith(selector string) *Selection {
+ return s.ReplaceWithMatcher(compileMatcher(selector))
+}
+
+// ReplaceWithMatcher replaces each element in the set of matched elements with
+// the nodes matched by the given Matcher.
+// It returns the removed elements.
+//
+// This follows the same rules as Selection.Append.
+func (s *Selection) ReplaceWithMatcher(m Matcher) *Selection {
+ return s.ReplaceWithNodes(m.MatchAll(s.document.rootNode)...)
+}
+
+// ReplaceWithSelection replaces each element in the set of matched elements with
+// the nodes from the given Selection.
+// It returns the removed elements.
+//
+// This follows the same rules as Selection.Append.
+func (s *Selection) ReplaceWithSelection(sel *Selection) *Selection {
+ return s.ReplaceWithNodes(sel.Nodes...)
+}
+
+// ReplaceWithHtml replaces each element in the set of matched elements with
+// the parsed HTML.
+// It returns the removed elements.
+//
+// This follows the same rules as Selection.Append.
+func (s *Selection) ReplaceWithHtml(html string) *Selection {
+ return s.ReplaceWithNodes(parseHtml(html)...)
+}
+
+// ReplaceWithNodes replaces each element in the set of matched elements with
+// the given nodes.
+// It returns the removed elements.
+//
+// This follows the same rules as Selection.Append.
+func (s *Selection) ReplaceWithNodes(ns ...*html.Node) *Selection {
+ // Insert the replacements as siblings after each target, then detach the
+ // targets themselves.
+ s.AfterNodes(ns...)
+ return s.Remove()
+}
+
+// SetHtml sets the html content of each element in the selection to
+// specified html string.
+func (s *Selection) SetHtml(html string) *Selection {
+ return setHtmlNodes(s, parseHtml(html)...)
+}
+
+// SetText sets the content of each element in the selection to specified content.
+// The provided text string is escaped.
+func (s *Selection) SetText(text string) *Selection {
+ // Escaping via html.EscapeString ensures the text is stored as a text
+ // node rather than parsed as markup.
+ return s.SetHtml(html.EscapeString(text))
+}
+
+// Unwrap removes the parents of the set of matched elements, leaving the matched
+// elements (and their siblings, if any) in their place.
+// It returns the original selection.
+func (s *Selection) Unwrap() *Selection {
+ s.Parent().Each(func(i int, ss *Selection) {
+ // For some reason, jquery allows unwrap to remove the head element, so
+ // allowing it here too. Same for html. Why it allows those elements to
+ // be unwrapped while not allowing body is a mystery to me.
+ if ss.Nodes[0].Data != "body" {
+ // Replace the parent with its own contents, effectively deleting it.
+ ss.ReplaceWithSelection(ss.Contents())
+ }
+ })
+
+ return s
+}
+
+// Wrap wraps each element in the set of matched elements inside the first
+// element matched by the given selector. The matched child is cloned before
+// being inserted into the document.
+//
+// It returns the original set of elements.
+func (s *Selection) Wrap(selector string) *Selection {
+ return s.WrapMatcher(compileMatcher(selector))
+}
+
+// WrapMatcher wraps each element in the set of matched elements inside the
+// first element matched by the given matcher. The matched child is cloned
+// before being inserted into the document.
+//
+// It returns the original set of elements.
+func (s *Selection) WrapMatcher(m Matcher) *Selection {
+ return s.wrapNodes(m.MatchAll(s.document.rootNode)...)
+}
+
+// WrapSelection wraps each element in the set of matched elements inside the
+// first element in the given Selection. The element is cloned before being
+// inserted into the document.
+//
+// It returns the original set of elements.
+func (s *Selection) WrapSelection(sel *Selection) *Selection {
+ return s.wrapNodes(sel.Nodes...)
+}
+
+// WrapHtml wraps each element in the set of matched elements inside the inner-
+// most child of the given HTML.
+//
+// It returns the original set of elements.
+func (s *Selection) WrapHtml(html string) *Selection {
+ return s.wrapNodes(parseHtml(html)...)
+}
+
+// WrapNode wraps each element in the set of matched elements inside the inner-
+// most child of the given node. The given node is copied before being inserted
+// into the document.
+//
+// It returns the original set of elements.
+func (s *Selection) WrapNode(n *html.Node) *Selection {
+ return s.wrapNodes(n)
+}
+
+// wrapNodes wraps every element individually: each single-element selection
+// is passed through wrapAllNodes, which clones the wrapper for it.
+func (s *Selection) wrapNodes(ns ...*html.Node) *Selection {
+ s.Each(func(i int, ss *Selection) {
+ ss.wrapAllNodes(ns...)
+ })
+
+ return s
+}
+
+// WrapAll wraps a single HTML structure, matched by the given selector, around
+// all elements in the set of matched elements. The matched child is cloned
+// before being inserted into the document.
+//
+// It returns the original set of elements.
+func (s *Selection) WrapAll(selector string) *Selection {
+ return s.WrapAllMatcher(compileMatcher(selector))
+}
+
+// WrapAllMatcher wraps a single HTML structure, matched by the given Matcher,
+// around all elements in the set of matched elements. The matched child is
+// cloned before being inserted into the document.
+//
+// It returns the original set of elements.
+func (s *Selection) WrapAllMatcher(m Matcher) *Selection {
+ return s.wrapAllNodes(m.MatchAll(s.document.rootNode)...)
+}
+
+// WrapAllSelection wraps a single HTML structure, the first node of the given
+// Selection, around all elements in the set of matched elements. The matched
+// child is cloned before being inserted into the document.
+//
+// It returns the original set of elements.
+func (s *Selection) WrapAllSelection(sel *Selection) *Selection {
+ return s.wrapAllNodes(sel.Nodes...)
+}
+
+// WrapAllHtml wraps the given HTML structure around all elements in the set of
+// matched elements. The matched child is cloned before being inserted into the
+// document.
+//
+// It returns the original set of elements.
+func (s *Selection) WrapAllHtml(html string) *Selection {
+ return s.wrapAllNodes(parseHtml(html)...)
+}
+
+// wrapAllNodes delegates to WrapAllNode with the first node, if any; extra
+// nodes beyond the first are intentionally ignored.
+func (s *Selection) wrapAllNodes(ns ...*html.Node) *Selection {
+ if len(ns) > 0 {
+ return s.WrapAllNode(ns[0])
+ }
+ return s
+}
+
+// WrapAllNode wraps the given node around the first element in the Selection,
+// making all other nodes in the Selection children of the given node. The node
+// is cloned before being inserted into the document.
+//
+// It returns the original set of elements.
+func (s *Selection) WrapAllNode(n *html.Node) *Selection {
+ if s.Size() == 0 {
+ return s
+ }
+
+ wrap := cloneNode(n)
+
+ // Put the wrapper where the first matched element currently sits, then
+ // detach that element (it is re-added inside the wrapper below).
+ first := s.Nodes[0]
+ if first.Parent != nil {
+ first.Parent.InsertBefore(wrap, first)
+ first.Parent.RemoveChild(first)
+ }
+
+ // Descend to the innermost element child of the wrapper; that is where
+ // the selection's nodes get appended.
+ for c := getFirstChildEl(wrap); c != nil; c = getFirstChildEl(wrap) {
+ wrap = c
+ }
+
+ newSingleSelection(wrap, s.document).AppendSelection(s)
+
+ return s
+}
+
+// WrapInner wraps an HTML structure, matched by the given selector, around the
+// content of element in the set of matched elements. The matched child is
+// cloned before being inserted into the document.
+//
+// It returns the original set of elements.
+func (s *Selection) WrapInner(selector string) *Selection {
+ return s.WrapInnerMatcher(compileMatcher(selector))
+}
+
+// WrapInnerMatcher wraps an HTML structure, matched by the given selector,
+// around the content of element in the set of matched elements. The matched
+// child is cloned before being inserted into the document.
+//
+// It returns the original set of elements.
+func (s *Selection) WrapInnerMatcher(m Matcher) *Selection {
+ return s.wrapInnerNodes(m.MatchAll(s.document.rootNode)...)
+}
+
+// WrapInnerSelection wraps an HTML structure, matched by the given selector,
+// around the content of element in the set of matched elements. The matched
+// child is cloned before being inserted into the document.
+//
+// It returns the original set of elements.
+func (s *Selection) WrapInnerSelection(sel *Selection) *Selection {
+ return s.wrapInnerNodes(sel.Nodes...)
+}
+
+// WrapInnerHtml wraps an HTML structure, matched by the given selector, around
+// the content of element in the set of matched elements. The matched child is
+// cloned before being inserted into the document.
+//
+// It returns the original set of elements.
+func (s *Selection) WrapInnerHtml(html string) *Selection {
+ return s.wrapInnerNodes(parseHtml(html)...)
+}
+
+// WrapInnerNode wraps an HTML structure, matched by the given selector, around
+// the content of element in the set of matched elements. The matched child is
+// cloned before being inserted into the document.
+//
+// It returns the original set of elements.
+func (s *Selection) WrapInnerNode(n *html.Node) *Selection {
+ return s.wrapInnerNodes(n)
+}
+
+func (s *Selection) wrapInnerNodes(ns ...*html.Node) *Selection {
+ if len(ns) == 0 {
+ return s
+ }
+
+ // Note: the callback parameter deliberately shadows the outer s; it is
+ // the per-element single selection being processed.
+ s.Each(func(i int, s *Selection) {
+ contents := s.Contents()
+
+ if contents.Size() > 0 {
+ contents.wrapAllNodes(ns...)
+ } else {
+ // No content to wrap: insert an empty clone of the wrapper.
+ s.AppendNodes(cloneNode(ns[0]))
+ }
+ })
+
+ return s
+}
+
+// parseHtml parses the fragment h in the context of a generic element node
+// and returns the resulting nodes.
+func parseHtml(h string) []*html.Node {
+ // Errors are only returned when the io.Reader returns any error besides
+ // EOF, but strings.Reader never will
+ nodes, err := html.ParseFragment(strings.NewReader(h), &html.Node{Type: html.ElementNode})
+ if err != nil {
+ // Defensive: unreachable given the note above.
+ panic("goquery: failed to parse HTML: " + err.Error())
+ }
+ return nodes
+}
+
+// setHtmlNodes empties every node in the selection and then appends a fresh
+// clone of each replacement node, so each target owns its own copy.
+func setHtmlNodes(s *Selection, ns ...*html.Node) *Selection {
+ for _, target := range s.Nodes {
+ // Drop the existing children first.
+ for target.FirstChild != nil {
+ target.RemoveChild(target.FirstChild)
+ }
+ // Then install a private clone of each replacement.
+ for _, repl := range ns {
+ target.AppendChild(cloneNode(repl))
+ }
+ }
+ return s
+}
+
+// getFirstChildEl returns the first child of n that is an ElementNode,
+// or nil when there is none.
+func getFirstChildEl(n *html.Node) *html.Node {
+ for c := n.FirstChild; c != nil; c = c.NextSibling {
+ if c.Type == html.ElementNode {
+ return c
+ }
+ }
+ return nil
+}
+
+// cloneNodes deep-copies every node in ns, preserving order.
+func cloneNodes(ns []*html.Node) []*html.Node {
+ out := make([]*html.Node, len(ns))
+ for i, n := range ns {
+ out[i] = cloneNode(n)
+ }
+ return out
+}
+
+// Deep copy a node. The new node has clones of all the original node's
+// children but none of its parents or siblings.
+func cloneNode(n *html.Node) *html.Node {
+ // Only Type, DataAtom, Data and Attr are copied here.
+ // NOTE(review): html.Node's Namespace field is not copied — confirm
+ // that is intended for the documents this library handles.
+ nn := &html.Node{
+ Type: n.Type,
+ DataAtom: n.DataAtom,
+ Data: n.Data,
+ Attr: make([]html.Attribute, len(n.Attr)),
+ }
+
+ copy(nn.Attr, n.Attr)
+ // Recursively clone the subtree; AppendChild links parent/sibling
+ // pointers within the copy only.
+ for c := n.FirstChild; c != nil; c = c.NextSibling {
+ nn.AppendChild(cloneNode(c))
+ }
+
+ return nn
+}
+
+// manipulateNodes invokes f(target, node) for every target in the selection
+// and every node in ns. All targets except the last receive deep clones; the
+// last target receives the original nodes, detached from their current
+// parent first (rule 2 and 3 of Selection.Append).
+func (s *Selection) manipulateNodes(ns []*html.Node, reverse bool,
+ f func(sn *html.Node, n *html.Node)) *Selection {
+
+ lasti := s.Size() - 1
+
+ // x/net/html doesn't provide document fragments for insertion, so to get
+ // things in the correct order with After() and Prepend(), the callback
+ // needs to be called on the reverse of the nodes.
+ if reverse {
+ for i, j := 0, len(ns)-1; i < j; i, j = i+1, j-1 {
+ ns[i], ns[j] = ns[j], ns[i]
+ }
+ }
+
+ for i, sn := range s.Nodes {
+ for _, n := range ns {
+ if i != lasti {
+ f(sn, cloneNode(n))
+ } else {
+ // Last target: move the original node, detaching it first.
+ if n.Parent != nil {
+ n.Parent.RemoveChild(n)
+ }
+ f(sn, n)
+ }
+ }
+ }
+
+ return s
+}
diff --git a/vendor/github.com/PuerkitoBio/goquery/manipulation_test.go b/vendor/github.com/PuerkitoBio/goquery/manipulation_test.go
new file mode 100644
index 0000000..c5f5022
--- /dev/null
+++ b/vendor/github.com/PuerkitoBio/goquery/manipulation_test.go
@@ -0,0 +1,513 @@
+package goquery
+
+import (
+ "testing"
+)
+
+const (
+ wrapHtml = "<div id=\"ins\">test string<p><em><b></b></em></p></div>"
+)
+ q.SetText(repl)
+
+ assertLength(t, doc.Find("#replace").Nodes, 0)
+ assertLength(t, doc.Find("#main, #foot").Nodes, 2)
+
+ if q.Text() != (repl + repl) {
+ t.Errorf("Expected text to be %v, found %v", (repl + repl), q.Text())
+ }
+
+ h, err := q.Html()
+ if err != nil {
+ t.Errorf("Error: %v", err)
+ }
+ esc := "&lt;div id=&#34;replace&#34;&gt;test&lt;/div&gt;"
+ if h != esc {
+ t.Errorf("Expected html to be %v, found %v", esc, h)
+ }
+
+ printSel(t, doc.Selection)
+}
+
+// TestReplaceWithSelection checks that the replaced node is returned by the
+// call, gone from the document, and that the replacement remains present once.
+func TestReplaceWithSelection(t *testing.T) {
+ doc := Doc2Clone()
+ sel := doc.Find("#nf6").ReplaceWithSelection(doc.Find("#nf5"))
+
+ assertSelectionIs(t, sel, "#nf6")
+ assertLength(t, doc.Find("#nf6").Nodes, 0)
+ assertLength(t, doc.Find("#nf5").Nodes, 1)
+
+ printSel(t, doc.Selection)
+}
+
+// TestUnwrap checks that unwrapping removes the parent element and promotes
+// the children (and their siblings) directly under the grandparent.
+func TestUnwrap(t *testing.T) {
+ doc := Doc2Clone()
+
+ doc.Find("#nf5").Unwrap()
+ assertLength(t, doc.Find("#foot").Nodes, 0)
+ assertLength(t, doc.Find("body > #nf1").Nodes, 1)
+ assertLength(t, doc.Find("body > #nf5").Nodes, 1)
+
+ printSel(t, doc.Selection)
+
+ // Unwrapping from two different parents in one call.
+ doc = Doc2Clone()
+
+ doc.Find("#nf5, #n1").Unwrap()
+ assertLength(t, doc.Find("#foot").Nodes, 0)
+ assertLength(t, doc.Find("#main").Nodes, 0)
+ assertLength(t, doc.Find("body > #n1").Nodes, 1)
+ assertLength(t, doc.Find("body > #nf5").Nodes, 1)
+
+ printSel(t, doc.Selection)
+}
+
+// TestUnwrapBody checks that Unwrap never removes the body element.
+func TestUnwrapBody(t *testing.T) {
+ doc := Doc2Clone()
+
+ doc.Find("#main").Unwrap()
+ assertLength(t, doc.Find("body").Nodes, 1)
+ assertLength(t, doc.Find("body > #main").Nodes, 1)
+
+ printSel(t, doc.Selection)
+}
+
+// TestUnwrapHead checks that the head element, unlike body, can be removed.
+func TestUnwrapHead(t *testing.T) {
+ doc := Doc2Clone()
+
+ doc.Find("title").Unwrap()
+ assertLength(t, doc.Find("head").Nodes, 0)
+ assertLength(t, doc.Find("head > title").Nodes, 0)
+ assertLength(t, doc.Find("title").Nodes, 1)
+
+ printSel(t, doc.Selection)
+}
+
+// TestUnwrapHtml checks that the html element can be removed as well.
+func TestUnwrapHtml(t *testing.T) {
+ doc := Doc2Clone()
+
+ doc.Find("head").Unwrap()
+ assertLength(t, doc.Find("html").Nodes, 0)
+ assertLength(t, doc.Find("html head").Nodes, 0)
+ assertLength(t, doc.Find("head").Nodes, 1)
+
+ printSel(t, doc.Selection)
+}
+
+// TestWrap checks that the wrapper is cloned around the target, so the
+// original wrapper element remains elsewhere in the document (2 matches).
+func TestWrap(t *testing.T) {
+ doc := Doc2Clone()
+ doc.Find("#nf1").Wrap("#nf2")
+ nf1 := doc.Find("#foot #nf2 #nf1")
+ assertLength(t, nf1.Nodes, 1)
+
+ nf2 := doc.Find("#nf2")
+ assertLength(t, nf2.Nodes, 2)
+
+ printSel(t, doc.Selection)
+}
+
+// TestWrapEmpty checks that wrapping with a non-matching selector is a no-op.
+func TestWrapEmpty(t *testing.T) {
+ doc := Doc2Clone()
+ doc.Find("#nf1").Wrap("#doesnt-exist")
+
+ origHtml, _ := Doc2().Html()
+ newHtml, _ := doc.Html()
+
+ if origHtml != newHtml {
+ t.Error("Expected the two documents to be identical.")
+ }
+
+ printSel(t, doc.Selection)
+}
+
+// TestWrapHtml checks wrapping with a parsed HTML wrapper string.
+func TestWrapHtml(t *testing.T) {
+ doc := Doc2Clone()
+ doc.Find(".odd").WrapHtml(wrapHtml)
+ nf2 := doc.Find("#ins #nf2")
+ assertLength(t, nf2.Nodes, 1)
+ printSel(t, doc.Selection)
+}
+
+// TestWrapSelection mirrors TestWrap using a Selection as the wrapper.
+func TestWrapSelection(t *testing.T) {
+ doc := Doc2Clone()
+ doc.Find("#nf1").WrapSelection(doc.Find("#nf2"))
+ nf1 := doc.Find("#foot #nf2 #nf1")
+ assertLength(t, nf1.Nodes, 1)
+
+ nf2 := doc.Find("#nf2")
+ assertLength(t, nf2.Nodes, 2)
+
+ printSel(t, doc.Selection)
+}
+
+// TestWrapAll checks that a single wrapper collects all matched elements as
+// siblings, in document order.
+func TestWrapAll(t *testing.T) {
+ doc := Doc2Clone()
+ doc.Find(".odd").WrapAll("#nf1")
+ nf1 := doc.Find("#main #nf1")
+ assertLength(t, nf1.Nodes, 1)
+
+ sel := nf1.Find("#n2 ~ #n4 ~ #n6 ~ #nf2 ~ #nf4 ~ #nf6")
+ assertLength(t, sel.Nodes, 1)
+
+ printSel(t, doc.Selection)
+}
+
+// TestWrapAllHtml checks that elements end up inside the innermost child of
+// the parsed wrapper structure.
+func TestWrapAllHtml(t *testing.T) {
+ doc := Doc2Clone()
+ doc.Find(".odd").WrapAllHtml(wrapHtml)
+ nf1 := doc.Find("#main div#ins div p em b #n2 ~ #n4 ~ #n6 ~ #nf2 ~ #nf4 ~ #nf6")
+ assertLength(t, nf1.Nodes, 1)
+ printSel(t, doc.Selection)
+}
+
+// TestWrapInnerNoContent checks WrapInner when the targets have no content:
+// an empty clone of the wrapper is appended to each.
+func TestWrapInnerNoContent(t *testing.T) {
+ doc := Doc2Clone()
+ doc.Find(".one").WrapInner(".two")
+
+ twos := doc.Find(".two")
+ assertLength(t, twos.Nodes, 4)
+ assertLength(t, doc.Find(".one .two").Nodes, 2)
+
+ printSel(t, doc.Selection)
+}
+
+// TestWrapInnerWithContent checks WrapInner when the targets have content:
+// the existing content is wrapped by the cloned wrapper.
+func TestWrapInnerWithContent(t *testing.T) {
+ doc := Doc3Clone()
+ doc.Find(".one").WrapInner(".two")
+
+ twos := doc.Find(".two")
+ assertLength(t, twos.Nodes, 4)
+ assertLength(t, doc.Find(".one .two").Nodes, 2)
+
+ printSel(t, doc.Selection)
+}
+
+// TestWrapInnerNoWrapper checks that a non-matching wrapper selector leaves
+// the document unchanged.
+func TestWrapInnerNoWrapper(t *testing.T) {
+ doc := Doc2Clone()
+ doc.Find(".one").WrapInner(".not-exist")
+
+ twos := doc.Find(".two")
+ assertLength(t, twos.Nodes, 2)
+ assertLength(t, doc.Find(".one").Nodes, 2)
+ assertLength(t, doc.Find(".one .two").Nodes, 0)
+
+ printSel(t, doc.Selection)
+}
+
+// TestWrapInnerHtml checks that inner content lands in the innermost child of
+// the parsed wrapper structure.
+func TestWrapInnerHtml(t *testing.T) {
+ doc := Doc2Clone()
+ doc.Find("#foot").WrapInnerHtml(wrapHtml)
+
+ foot := doc.Find("#foot div#ins div p em b #nf1 ~ #nf2 ~ #nf3")
+ assertLength(t, foot.Nodes, 1)
+
+ printSel(t, doc.Selection)
+}
diff --git a/vendor/github.com/PuerkitoBio/goquery/property.go b/vendor/github.com/PuerkitoBio/goquery/property.go
new file mode 100644
index 0000000..411126d
--- /dev/null
+++ b/vendor/github.com/PuerkitoBio/goquery/property.go
@@ -0,0 +1,275 @@
+package goquery
+
+import (
+ "bytes"
+ "regexp"
+ "strings"
+
+ "golang.org/x/net/html"
+)
+
+// rxClassTrim matches whitespace characters that may separate class names in
+// a class attribute besides plain spaces; they are normalized to spaces.
+var rxClassTrim = regexp.MustCompile("[\t\r\n]")
+
+// Attr gets the specified attribute's value for the first element in the
+// Selection. To get the value for each element individually, use a looping
+// construct such as Each or Map method.
+func (s *Selection) Attr(attrName string) (val string, exists bool) {
+ if len(s.Nodes) == 0 {
+ return
+ }
+ return getAttributeValue(attrName, s.Nodes[0])
+}
+
+// AttrOr works like Attr but returns default value if attribute is not present.
+func (s *Selection) AttrOr(attrName, defaultValue string) string {
+ // Also returns the default for an empty selection.
+ if len(s.Nodes) == 0 {
+ return defaultValue
+ }
+
+ val, exists := getAttributeValue(attrName, s.Nodes[0])
+ if !exists {
+ return defaultValue
+ }
+
+ return val
+}
+
+// RemoveAttr removes the named attribute from each element in the set of matched elements.
+func (s *Selection) RemoveAttr(attrName string) *Selection {
+ for _, n := range s.Nodes {
+ removeAttr(n, attrName)
+ }
+
+ return s
+}
+
+// SetAttr sets the given attribute on each element in the set of matched elements.
+func (s *Selection) SetAttr(attrName, val string) *Selection {
+ for _, n := range s.Nodes {
+ // Update in place when the attribute exists; append a new one otherwise.
+ attr := getAttributePtr(attrName, n)
+ if attr == nil {
+ n.Attr = append(n.Attr, html.Attribute{Key: attrName, Val: val})
+ } else {
+ attr.Val = val
+ }
+ }
+
+ return s
+}
+
+// Text gets the combined text contents of each element in the set of matched
+// elements, including their descendants.
+func (s *Selection) Text() string {
+ var buf bytes.Buffer
+
+ // Depth-first walk of each matched node; avoids allocating a Selection
+ // per node as Each would.
+ var appendText func(*html.Node)
+ appendText = func(n *html.Node) {
+ if n.Type == html.TextNode {
+ // Keep newlines and spaces, like jQuery
+ buf.WriteString(n.Data)
+ }
+ for c := n.FirstChild; c != nil; c = c.NextSibling {
+ appendText(c)
+ }
+ }
+ for _, root := range s.Nodes {
+ appendText(root)
+ }
+
+ return buf.String()
+}
+
+// Size is an alias for Length.
+func (s *Selection) Size() int {
+ return s.Length()
+}
+
+// Length returns the number of elements in the Selection object.
+func (s *Selection) Length() int {
+ return len(s.Nodes)
+}
+
+// Html gets the HTML contents of the first element in the set of matched
+// elements. It includes text and comment nodes.
+func (s *Selection) Html() (ret string, e error) {
+ // Since there is no .innerHtml, the HTML content must be re-created from
+ // the nodes using html.Render.
+ var buf bytes.Buffer
+
+ if len(s.Nodes) > 0 {
+ // Render each child of the first matched node, not the node itself
+ // (inner HTML semantics).
+ for c := s.Nodes[0].FirstChild; c != nil; c = c.NextSibling {
+ e = html.Render(&buf, c)
+ if e != nil {
+ return
+ }
+ }
+ ret = buf.String()
+ }
+
+ return
+}
+
+// AddClass adds the given class(es) to each element in the set of matched elements.
+// Multiple class names can be specified, separated by a space or via multiple arguments.
+func (s *Selection) AddClass(class ...string) *Selection {
+ classStr := strings.TrimSpace(strings.Join(class, " "))
+
+ if classStr == "" {
+ return s
+ }
+
+ tcls := getClassesSlice(classStr)
+ for _, n := range s.Nodes {
+ // curClasses is space-padded (" a b ") so exact-token matching works
+ // via the " name " substring check below.
+ curClasses, attr := getClassesAndAttr(n, true)
+ for _, newClass := range tcls {
+ if !strings.Contains(curClasses, " "+newClass+" ") {
+ curClasses += newClass + " "
+ }
+ }
+
+ setClasses(n, attr, curClasses)
+ }
+
+ return s
+}
+
+// HasClass determines whether any of the matched elements are assigned the
+// given class.
+func (s *Selection) HasClass(class string) bool {
+ // Space-pad the needle so only whole class tokens match.
+ needle := " " + class + " "
+ for _, node := range s.Nodes {
+ cls, _ := getClassesAndAttr(node, false)
+ if strings.Contains(cls, needle) {
+ return true
+ }
+ }
+ return false
+}
+
+// RemoveClass removes the given class(es) from each element in the set of matched elements.
+// Multiple class names can be specified, separated by a space or via multiple arguments.
+// If no class name is provided, all classes are removed.
+func (s *Selection) RemoveClass(class ...string) *Selection {
+ var rclasses []string
+
+ classStr := strings.TrimSpace(strings.Join(class, " "))
+ remove := classStr == ""
+
+ if !remove {
+ rclasses = getClassesSlice(classStr)
+ }
+
+ for _, n := range s.Nodes {
+ if remove {
+ // No names given: drop the class attribute entirely.
+ removeAttr(n, "class")
+ } else {
+ // classes is space-padded, so " name " removes whole tokens only.
+ classes, attr := getClassesAndAttr(n, true)
+ for _, rcl := range rclasses {
+ classes = strings.Replace(classes, " "+rcl+" ", " ", -1)
+ }
+
+ setClasses(n, attr, classes)
+ }
+ }
+
+ return s
+}
+
+// ToggleClass adds or removes the given class(es) for each element in the set of matched elements.
+// Multiple class names can be specified, separated by a space or via multiple arguments.
+func (s *Selection) ToggleClass(class ...string) *Selection {
+ classStr := strings.TrimSpace(strings.Join(class, " "))
+
+ if classStr == "" {
+ return s
+ }
+
+ tcls := getClassesSlice(classStr)
+
+ for _, n := range s.Nodes {
+ classes, attr := getClassesAndAttr(n, true)
+ for _, tcl := range tcls {
+ // Present: remove it; absent: append it.
+ if strings.Contains(classes, " "+tcl+" ") {
+ classes = strings.Replace(classes, " "+tcl+" ", " ", -1)
+ } else {
+ classes += tcl + " "
+ }
+ }
+
+ setClasses(n, attr, classes)
+ }
+
+ return s
+}
+
+// getAttributePtr returns a pointer to n's attribute with the given key, or
+// nil when n is nil or the attribute is absent.
+func getAttributePtr(attrName string, n *html.Node) *html.Attribute {
+ if n == nil {
+ return nil
+ }
+ for i := range n.Attr {
+ if n.Attr[i].Key == attrName {
+ return &n.Attr[i]
+ }
+ }
+ return nil
+}
+
+// getAttributeValue returns the value of the named attribute on n and
+// whether it was present.
+func getAttributeValue(attrName string, n *html.Node) (val string, exists bool) {
+ attr := getAttributePtr(attrName, n)
+ if attr == nil {
+ return "", false
+ }
+ return attr.Val, true
+}
+
+// Get and normalize the "class" attribute from the node.
+// The returned classes string is space-padded (" a b ") so callers can match
+// whole tokens with a " name " substring check. When create is true and the
+// element has no class attribute, an empty one is appended so the returned
+// attr pointer is usable for writing.
+func getClassesAndAttr(n *html.Node, create bool) (classes string, attr *html.Attribute) {
+ // Applies only to element nodes
+ if n.Type == html.ElementNode {
+ attr = getAttributePtr("class", n)
+ if attr == nil && create {
+ n.Attr = append(n.Attr, html.Attribute{
+ Key: "class",
+ Val: "",
+ })
+ attr = &n.Attr[len(n.Attr)-1]
+ }
+ }
+
+ if attr == nil {
+ // No attribute available: a single space is the padded empty class set.
+ classes = " "
+ } else {
+ classes = rxClassTrim.ReplaceAllString(" "+attr.Val+" ", " ")
+ }
+
+ return
+}
+
+// getClassesSlice normalizes the whitespace in classes and splits it on
+// spaces into individual class-name tokens (may include empty strings from
+// the padding).
+func getClassesSlice(classes string) []string {
+ return strings.Split(rxClassTrim.ReplaceAllString(" "+classes+" ", " "), " ")
+}
+
+// removeAttr deletes the named attribute from n, if present. It uses a
+// swap-with-last delete, so attribute order is not preserved.
+func removeAttr(n *html.Node, attrName string) {
+ for i, a := range n.Attr {
+ if a.Key == attrName {
+ n.Attr[i], n.Attr[len(n.Attr)-1], n.Attr =
+ n.Attr[len(n.Attr)-1], html.Attribute{}, n.Attr[:len(n.Attr)-1]
+ return
+ }
+ }
+}
+
+// setClasses writes the (padded) classes string back into attr, removing the
+// class attribute entirely when the resulting set is empty.
+func setClasses(n *html.Node, attr *html.Attribute, classes string) {
+ classes = strings.TrimSpace(classes)
+ if classes == "" {
+ removeAttr(n, "class")
+ return
+ }
+
+ attr.Val = classes
+}
diff --git a/vendor/github.com/PuerkitoBio/goquery/property_test.go b/vendor/github.com/PuerkitoBio/goquery/property_test.go
new file mode 100644
index 0000000..1095dcc
--- /dev/null
+++ b/vendor/github.com/PuerkitoBio/goquery/property_test.go
@@ -0,0 +1,252 @@
+package goquery
+
+import (
+ "regexp"
+ "strings"
+ "testing"
+)
+
+func TestAttrExists(t *testing.T) {
+ if val, ok := Doc().Find("a").Attr("href"); !ok {
+ t.Error("Expected a value for the href attribute.")
+ } else {
+ t.Logf("Href of first anchor: %v.", val)
+ }
+}
+
+func TestAttrOr(t *testing.T) {
+ if val := Doc().Find("a").AttrOr("fake-attribute", "alternative"); val != "alternative" {
+ t.Error("Expected an alternative value for 'fake-attribute' attribute.")
+ } else {
+ t.Logf("Value returned for not existing attribute: %v.", val)
+ }
+ if val := Doc().Find("zz").AttrOr("fake-attribute", "alternative"); val != "alternative" {
+ t.Error("Expected an alternative value for 'fake-attribute' on an empty selection.")
+ } else {
+ t.Logf("Value returned for empty selection: %v.", val)
+ }
+}
+
+func TestAttrNotExist(t *testing.T) {
+ if val, ok := Doc().Find("div.row-fluid").Attr("href"); ok {
+ t.Errorf("Expected no value for the href attribute, got %v.", val)
+ }
+}
+
+func TestRemoveAttr(t *testing.T) {
+ sel := Doc2Clone().Find("div")
+
+ sel.RemoveAttr("id")
+
+ _, ok := sel.Attr("id")
+ if ok {
+ t.Error("Expected there to be no id attributes set")
+ }
+}
+
+func TestSetAttr(t *testing.T) {
+ sel := Doc2Clone().Find("#main")
+
+ sel.SetAttr("id", "not-main")
+
+ val, ok := sel.Attr("id")
+ if !ok {
+ t.Error("Expected an id attribute on main")
+ }
+
+ if val != "not-main" {
+ t.Errorf("Expected an attribute id to be not-main, got %s", val)
+ }
+}
+
+func TestSetAttr2(t *testing.T) {
+ sel := Doc2Clone().Find("#main")
+
+ sel.SetAttr("foo", "bar")
+
+ val, ok := sel.Attr("foo")
+ if !ok {
+ t.Error("Expected an 'foo' attribute on main")
+ }
+
+ if val != "bar" {
+ t.Errorf("Expected an attribute 'foo' to be 'bar', got '%s'", val)
+ }
+}
+
+func TestText(t *testing.T) {
+ txt := Doc().Find("h1").Text()
+ if strings.Trim(txt, " \n\r\t") != "Provok.in" {
+ t.Errorf("Expected text to be Provok.in, found %s.", txt)
+ }
+}
+
+func TestText2(t *testing.T) {
+ txt := Doc().Find(".hero-unit .container-fluid .row-fluid:nth-child(1)").Text()
+ if ok, e := regexp.MatchString(`^\s+Provok\.in\s+Prove your point.\s+$`, txt); !ok || e != nil {
+ t.Errorf("Expected text to be Provok.in Prove your point., found %s.", txt)
+ if e != nil {
+ t.Logf("Error: %s.", e.Error())
+ }
+ }
+}
+
+func TestText3(t *testing.T) {
+ txt := Doc().Find(".pvk-gutter").First().Text()
+ // There's an &nbsp; (U+00A0 non-breaking space) character in there...
+ if ok, e := regexp.MatchString(`^[\s\x{00A0}]+$`, txt); !ok || e != nil {
+ t.Errorf("Expected spaces, found <%v>.", txt)
+ if e != nil {
+ t.Logf("Error: %s.", e.Error())
+ }
+ }
+}
+
+func TestHtml(t *testing.T) {
+ txt, e := Doc().Find("h1").Html()
+ if e != nil {
+ t.Errorf("Error: %s.", e)
+ }
+
+ if ok, e := regexp.MatchString(`^\s*Provok\.in\s*$`, txt); !ok || e != nil {
+ t.Errorf("Unexpected HTML content, found %s.", txt)
+ if e != nil {
+ t.Logf("Error: %s.", e.Error())
+ }
+ }
+}
+
+func TestNbsp(t *testing.T) {
+ src := `
Some text
`
+ d, err := NewDocumentFromReader(strings.NewReader(src))
+ if err != nil {
+ t.Fatal(err)
+ }
+ txt := d.Find("p").Text()
+ ix := strings.Index(txt, "\u00a0")
+ if ix != 4 {
+ t.Errorf("Text: expected a non-breaking space at index 4, got %d", ix)
+ }
+
+ h, err := d.Find("p").Html()
+ if err != nil {
+ t.Fatal(err)
+ }
+ ix = strings.Index(h, "\u00a0")
+ if ix != 4 {
+ t.Errorf("Html: expected a non-breaking space at index 4, got %d", ix)
+ }
+}
+
+func TestAddClass(t *testing.T) {
+ sel := Doc2Clone().Find("#main")
+ sel.AddClass("main main main")
+
+ // Make sure that class was only added once
+ if a, ok := sel.Attr("class"); !ok || a != "main" {
+ t.Error("Expected #main to have class main")
+ }
+}
+
+func TestAddClassSimilar(t *testing.T) {
+ sel := Doc2Clone().Find("#nf5")
+ sel.AddClass("odd")
+
+ assertClass(t, sel, "odd")
+ assertClass(t, sel, "odder")
+ printSel(t, sel.Parent())
+}
+
+func TestAddEmptyClass(t *testing.T) {
+ sel := Doc2Clone().Find("#main")
+ sel.AddClass("")
+
+ // Make sure that no class attribute was added
+ if a, ok := sel.Attr("class"); ok {
+ t.Errorf("Expected #main to not to have a class, have: %s", a)
+ }
+}
+
+func TestAddClasses(t *testing.T) {
+ sel := Doc2Clone().Find("#main")
+ sel.AddClass("a b")
+
+ // Make sure that both classes were added
+ if !sel.HasClass("a") || !sel.HasClass("b") {
+ t.Errorf("#main does not have classes")
+ }
+}
+
+func TestHasClass(t *testing.T) {
+ sel := Doc().Find("div")
+ if !sel.HasClass("span12") {
+ t.Error("Expected at least one div to have class span12.")
+ }
+}
+
+func TestHasClassNone(t *testing.T) {
+ sel := Doc().Find("h2")
+ if sel.HasClass("toto") {
+ t.Error("Expected h1 to have no class.")
+ }
+}
+
+func TestHasClassNotFirst(t *testing.T) {
+ sel := Doc().Find(".alert")
+ if !sel.HasClass("alert-error") {
+ t.Error("Expected .alert to also have class .alert-error.")
+ }
+}
+
+func TestRemoveClass(t *testing.T) {
+ sel := Doc2Clone().Find("#nf1")
+ sel.RemoveClass("one row")
+
+ if !sel.HasClass("even") || sel.HasClass("one") || sel.HasClass("row") {
+ classes, _ := sel.Attr("class")
+ t.Error("Expected #nf1 to have class even, has ", classes)
+ }
+}
+
+func TestRemoveClassSimilar(t *testing.T) {
+ sel := Doc2Clone().Find("#nf5, #nf6")
+ assertLength(t, sel.Nodes, 2)
+
+ sel.RemoveClass("odd")
+ assertClass(t, sel.Eq(0), "odder")
+ printSel(t, sel)
+}
+
+func TestRemoveAllClasses(t *testing.T) {
+ sel := Doc2Clone().Find("#nf1")
+ sel.RemoveClass()
+
+ if a, ok := sel.Attr("class"); ok {
+ t.Error("All classes were not removed, has ", a)
+ }
+
+ sel = Doc2Clone().Find("#main")
+ sel.RemoveClass()
+ if a, ok := sel.Attr("class"); ok {
+ t.Error("All classes were not removed, has ", a)
+ }
+}
+
+func TestToggleClass(t *testing.T) {
+ sel := Doc2Clone().Find("#nf1")
+
+ sel.ToggleClass("one")
+ if sel.HasClass("one") {
+ t.Error("Expected #nf1 to not have class one")
+ }
+
+ sel.ToggleClass("one")
+ if !sel.HasClass("one") {
+ t.Error("Expected #nf1 to have class one")
+ }
+
+ sel.ToggleClass("one even row")
+ if a, ok := sel.Attr("class"); ok {
+ t.Errorf("Expected #nf1 to have no classes, have %q", a)
+ }
+}
diff --git a/vendor/github.com/PuerkitoBio/goquery/query.go b/vendor/github.com/PuerkitoBio/goquery/query.go
new file mode 100644
index 0000000..fe86bf0
--- /dev/null
+++ b/vendor/github.com/PuerkitoBio/goquery/query.go
@@ -0,0 +1,49 @@
+package goquery
+
+import "golang.org/x/net/html"
+
+// Is checks the current matched set of elements against a selector and
+// returns true if at least one of these elements matches.
+func (s *Selection) Is(selector string) bool {
+ return s.IsMatcher(compileMatcher(selector))
+}
+
+// IsMatcher checks the current matched set of elements against a matcher and
+// returns true if at least one of these elements matches.
+func (s *Selection) IsMatcher(m Matcher) bool {
+ if len(s.Nodes) > 0 {
+ if len(s.Nodes) == 1 {
+ return m.Match(s.Nodes[0])
+ }
+ return len(m.Filter(s.Nodes)) > 0
+ }
+
+ return false
+}
+
+// IsFunction checks the current matched set of elements against a predicate and
+// returns true if at least one of these elements matches.
+func (s *Selection) IsFunction(f func(int, *Selection) bool) bool {
+ return s.FilterFunction(f).Length() > 0
+}
+
+// IsSelection checks the current matched set of elements against a Selection object
+// and returns true if at least one of these elements matches.
+func (s *Selection) IsSelection(sel *Selection) bool {
+ return s.FilterSelection(sel).Length() > 0
+}
+
+// IsNodes checks the current matched set of elements against the specified nodes
+// and returns true if at least one of these elements matches.
+func (s *Selection) IsNodes(nodes ...*html.Node) bool {
+ return s.FilterNodes(nodes...).Length() > 0
+}
+
+// Contains returns true if the specified Node is within,
+// at any depth, one of the nodes in the Selection object.
+// It is NOT inclusive, to behave like jQuery's implementation, and
+// unlike Javascript's .contains, so if the contained
+// node is itself in the selection, it returns false.
+func (s *Selection) Contains(n *html.Node) bool {
+ return sliceContains(s.Nodes, n)
+}
diff --git a/vendor/github.com/PuerkitoBio/goquery/query_test.go b/vendor/github.com/PuerkitoBio/goquery/query_test.go
new file mode 100644
index 0000000..54b2a2e
--- /dev/null
+++ b/vendor/github.com/PuerkitoBio/goquery/query_test.go
@@ -0,0 +1,103 @@
+package goquery
+
+import (
+ "testing"
+)
+
+func TestIs(t *testing.T) {
+ sel := Doc().Find(".footer p:nth-child(1)")
+ if !sel.Is("p") {
+ t.Error("Expected .footer p:nth-child(1) to be p.")
+ }
+}
+
+func TestIsInvalid(t *testing.T) {
+ sel := Doc().Find(".footer p:nth-child(1)")
+ if sel.Is("") {
+ t.Error("Is should not succeed with invalid selector string")
+ }
+}
+
+func TestIsPositional(t *testing.T) {
+ sel := Doc().Find(".footer p:nth-child(2)")
+ if !sel.Is("p:nth-child(2)") {
+ t.Error("Expected .footer p:nth-child(2) to be p:nth-child(2).")
+ }
+}
+
+func TestIsPositionalNot(t *testing.T) {
+ sel := Doc().Find(".footer p:nth-child(1)")
+ if sel.Is("p:nth-child(2)") {
+ t.Error("Expected .footer p:nth-child(1) NOT to be p:nth-child(2).")
+ }
+}
+
+func TestIsFunction(t *testing.T) {
+ ok := Doc().Find("div").IsFunction(func(i int, s *Selection) bool {
+ return s.HasClass("container-fluid")
+ })
+
+ if !ok {
+ t.Error("Expected some div to have a container-fluid class.")
+ }
+}
+
+func TestIsFunctionRollback(t *testing.T) {
+ ok := Doc().Find("div").IsFunction(func(i int, s *Selection) bool {
+ return s.HasClass("container-fluid")
+ })
+
+ if !ok {
+ t.Error("Expected some div to have a container-fluid class.")
+ }
+}
+
+func TestIsSelection(t *testing.T) {
+ sel := Doc().Find("div")
+ sel2 := Doc().Find(".pvk-gutter")
+
+ if !sel.IsSelection(sel2) {
+ t.Error("Expected some div to have a pvk-gutter class.")
+ }
+}
+
+func TestIsSelectionNot(t *testing.T) {
+ sel := Doc().Find("div")
+ sel2 := Doc().Find("a")
+
+ if sel.IsSelection(sel2) {
+ t.Error("Expected some div NOT to be an anchor.")
+ }
+}
+
+func TestIsNodes(t *testing.T) {
+ sel := Doc().Find("div")
+ sel2 := Doc().Find(".footer")
+
+ if !sel.IsNodes(sel2.Nodes[0]) {
+ t.Error("Expected some div to have a footer class.")
+ }
+}
+
+func TestDocContains(t *testing.T) {
+ sel := Doc().Find("h1")
+ if !Doc().Contains(sel.Nodes[0]) {
+ t.Error("Expected document to contain H1 tag.")
+ }
+}
+
+func TestSelContains(t *testing.T) {
+ sel := Doc().Find(".row-fluid")
+ sel2 := Doc().Find("a[ng-click]")
+ if !sel.Contains(sel2.Nodes[0]) {
+ t.Error("Expected .row-fluid to contain a[ng-click] tag.")
+ }
+}
+
+func TestSelNotContains(t *testing.T) {
+ sel := Doc().Find("a.link")
+ sel2 := Doc().Find("span")
+ if sel.Contains(sel2.Nodes[0]) {
+ t.Error("Expected a.link to NOT contain span tag.")
+ }
+}
diff --git a/vendor/github.com/PuerkitoBio/goquery/testdata/gotesting.html b/vendor/github.com/PuerkitoBio/goquery/testdata/gotesting.html
new file mode 100644
index 0000000..ba5348f
--- /dev/null
+++ b/vendor/github.com/PuerkitoBio/goquery/testdata/gotesting.html
@@ -0,0 +1,855 @@
+
+
+
+
+
+ testing - The Go Programming Language
+
+
+
+
+
+
+
+
+
+
+
+Package testing provides support for automated testing of Go packages.
+It is intended to be used in concert with the “go test” command, which automates
+execution of any function of the form
+
+
func TestXxx(*testing.T)
+
+
+where Xxx can be any alphanumeric string (but the first letter must not be in
+[a-z]) and serves to identify the test routine.
+These TestXxx routines should be declared within the package they are testing.
+
+
+Functions of the form
+
+
func BenchmarkXxx(*testing.B)
+
+
+are considered benchmarks, and are executed by the "go test" command when
+the -test.bench flag is provided.
+
+
+A sample benchmark function looks like this:
+
+
func BenchmarkHello(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ fmt.Sprintf("hello")
+ }
+}
+
+
+The benchmark package will vary b.N until the benchmark function lasts
+long enough to be timed reliably. The output
+
+
testing.BenchmarkHello 10000000 282 ns/op
+
+
+means that the loop ran 10000000 times at a speed of 282 ns per loop.
+
+
+If a benchmark needs some expensive setup before running, the timer
+may be stopped:
+
+
func BenchmarkBigLen(b *testing.B) {
+ b.StopTimer()
+ big := NewBig()
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ big.Len()
+ }
+}
+
+
+The package also runs and verifies example code. Example functions may
+include a concluding comment that begins with "Output:" and is compared with
+the standard output of the function when the tests are run, as in these
+examples of an example:
+
+Multiple example functions for a type/function/method may be provided by
+appending a distinct suffix to the name. The suffix must start with a
+lower-case letter.
+
+The entire test file is presented as the example when it contains a single
+example function, at least one other function, type, variable, or constant
+declaration, and no test or benchmark functions.
+
+StartTimer starts timing a test. This function is called automatically
+before a benchmark starts, but it can also used to resume timing after
+a call to StopTimer.
+
type BenchmarkResult struct {
+ N int // The number of iterations.
+ T time.Duration // The total time taken.
+ Bytes int64 // Bytes processed in one iteration.
+}
type T struct {
+ // contains filtered or unexported fields
+}
+
+T is a type passed to Test functions to manage test state and support formatted test logs.
+Logs are accumulated during execution and dumped to standard error when done.
+
The syntax of Go is broadly similar to that of C: blocks of code are surrounded with curly braces; common control flow structures include for, switch, and if. Unlike C, line-ending semicolons are optional, variable declarations are written differently and are usually optional, type conversions must be made explicit, and new go and select control keywords have been introduced to support concurrent programming. New built-in types include maps, Unicode strings, array slices, and channels for inter-thread communication.
+
Go is designed for exceptionally fast compiling times, even on modest hardware.[10] The language requires garbage collection. Certain concurrency-related structural conventions of Go (channels and alternative channel inputs) are borrowed from Tony Hoare'sCSP. Unlike previous concurrent programming languages such as occam or Limbo, Go does not provide any built-in notion of safe or verifiable concurrency.[11]
+
Of features found in C++ or Java, Go does not include type inheritance, generic programming, assertions, method overloading, or pointer arithmetic.[2] Of these, the Go authors express an openness to generic programming, explicitly argue against assertions and pointer arithmetic, while defending the choice to omit type inheritance as giving a more useful language, encouraging heavy use of interfaces instead.[2] Initially, the language did not include exception handling, but in March 2010 a mechanism known as panic/recover was implemented to handle exceptional errors while avoiding some of the problems the Go authors find with exceptions.[12][13]
Go allows a programmer to write functions that can operate on inputs of arbitrary type, provided that the type implements the functions defined by a given interface.
+
Unlike Java, the interfaces a type supports do not need to be specified at the point at which the type is defined, and Go interfaces do not participate in a type hierarchy. A Go interface is best described as a set of methods, each identified by a name and signature. A type is considered to implement an interface if all the required methods have been defined for that type. An interface can be declared to "embed" other interfaces, meaning the declared interface includes the methods defined in the other interfaces.[11]
+
Unlike Java, the in-memory representation of an object does not contain a pointer to a virtual method table. Instead a value of interface type is implemented as a pair of a pointer to the object, and a pointer to a dictionary containing implementations of the interface methods for that type.
These four definitions could have been placed in separate files, in different parts of the program. Notably, the programmer who defined the Sequence type did not need to declare that the type implemented HasLength, and the person who implemented the Len method for Sequence did not need to specify that this method was part of HasLength.
Visibility of structures, structure fields, variables, constants, methods, top-level types and functions outside their defining package is defined implicitly according to the capitalization of their identifier.[14]
Go provides goroutines, small lightweight threads; the name alludes to coroutines. Goroutines are created with the go statement from anonymous or named functions.
+
Goroutines are executed in parallel with other goroutines, including their caller. They do not necessarily run in separate threads, but a group of goroutines are multiplexed onto multiple threads — execution control is moved between them by blocking them when sending or receiving messages over channels.
6g/8g/5g (the compilers for AMD64, x86, and ARM respectively) with their supporting tools (collectively known as "gc") based on Ken's previous work on Plan 9's C toolchain.
+
gccgo, a GCC frontend written in C++,[15] and now officially supported as of version 4.6, albeit not part of the standard binary for gcc.[16]
+
+
Both compilers work on Unix-like systems, and a port to Microsoft Windows of the gc compiler and runtime have been integrated in the main distribution. Most of the standard libraries also work on Windows.
+
There is also an unmaintained "tiny" runtime environment that allows Go programs to run on bare hardware.[17]
Go's automatic semicolon insertion feature requires that opening braces not be placed on their own lines, and this is thus the preferred brace style; the examples shown comply with this style.[18]
Michele Simionato wrote in an article for artima.com:[20]
+
+
Here I just wanted to point out the design choices about interfaces and inheritance. Such ideas are not new and it is a shame that no popular language has followed such particular route in the design space. I hope Go will become popular; if not, I hope such ideas will finally enter in a popular language, we are already 10 or 20 years too late :-(
Go is extremely easy to dive into. There are a minimal number of fundamental language concepts and the syntax is clean and designed to be clear and unambiguous. Go is still experimental and still a little rough around the edges.
+
+
Ars Technica interviewed Rob Pike, one of the authors of Go, and asked why a new language was needed. He replied that:[22]
+
+
It wasn't enough to just add features to existing programming languages, because sometimes you can get more in the long run by taking things away. They wanted to start from scratch and rethink everything. ... [But they did not want] to deviate too much from what developers already knew because they wanted to avoid alienating Go's target audience.
The complexity of C++ (even more complexity has been added in the new C++), and the resulting impact on productivity, is no longer justified. All the hoops that the C++ programmer had to jump through in order to use a C-compatible language make no sense anymore -- they're just a waste of time and effort. Now, Go makes much more sense for the class of problems that C++ was originally intended to solve.
On the day of the general release of the language, Francis McCabe, developer of the Go! programming language (note the exclamation point), requested a name change of Google's language to prevent confusion with his language.[25] The issue was closed by a Google developer on 12 October 2010 with the custom status "Unfortunate", with a comment that "there are many computing products and services named Go. In the 11 months since our release, there has been minimal confusion of the two languages."[26]
^"A Tutorial for the Go Programming Language". The Go Programming Language. Google. http://golang.org/doc/go_tutorial.html. Retrieved 10 March 2010. "In Go the rule about visibility of information is simple: if a name (of a top-level type, function, method, constant or variable, or of a structure field or method) is capitalized, users of the package may see it. Otherwise, the name and hence the thing being named is visible only inside the package in which it is declared."
^"A Tutorial for the Go Programming Language". The Go Programming Language. Google. http://golang.org/doc/go_tutorial.html. Retrieved 10 March 2010. "The one surprise is that it's important to put the opening brace of a construct such as an if statement on the same line as the if; however, if you don't, there are situations that may not compile or may give the wrong result. The language forces the brace style to some extent."
+
+
\ No newline at end of file
diff --git a/vendor/github.com/PuerkitoBio/goquery/testdata/page2.html b/vendor/github.com/PuerkitoBio/goquery/testdata/page2.html
new file mode 100644
index 0000000..4c2f92f
--- /dev/null
+++ b/vendor/github.com/PuerkitoBio/goquery/testdata/page2.html
@@ -0,0 +1,24 @@
+
+
+
+ Tests for siblings
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/vendor/github.com/PuerkitoBio/goquery/testdata/page3.html b/vendor/github.com/PuerkitoBio/goquery/testdata/page3.html
new file mode 100644
index 0000000..17e8624
--- /dev/null
+++ b/vendor/github.com/PuerkitoBio/goquery/testdata/page3.html
@@ -0,0 +1,24 @@
+
+
+
+ Tests for siblings
+
+
+
+
hello
+
+
+
+
+
+
+
+
text
+
+
+
+
+
+
+
+
diff --git a/vendor/github.com/PuerkitoBio/goquery/traversal.go b/vendor/github.com/PuerkitoBio/goquery/traversal.go
new file mode 100644
index 0000000..5fa5315
--- /dev/null
+++ b/vendor/github.com/PuerkitoBio/goquery/traversal.go
@@ -0,0 +1,698 @@
+package goquery
+
+import "golang.org/x/net/html"
+
+type siblingType int
+
+// Sibling type, used internally when iterating over children at the same
+// level (siblings) to specify which nodes are requested.
+const (
+ siblingPrevUntil siblingType = iota - 3
+ siblingPrevAll
+ siblingPrev
+ siblingAll
+ siblingNext
+ siblingNextAll
+ siblingNextUntil
+ siblingAllIncludingNonElements
+)
+
+// Find gets the descendants of each element in the current set of matched
+// elements, filtered by a selector. It returns a new Selection object
+// containing these matched elements.
+func (s *Selection) Find(selector string) *Selection {
+ return pushStack(s, findWithMatcher(s.Nodes, compileMatcher(selector)))
+}
+
+// FindMatcher gets the descendants of each element in the current set of matched
+// elements, filtered by the matcher. It returns a new Selection object
+// containing these matched elements.
+func (s *Selection) FindMatcher(m Matcher) *Selection {
+ return pushStack(s, findWithMatcher(s.Nodes, m))
+}
+
+// FindSelection gets the descendants of each element in the current
+// Selection, filtered by a Selection. It returns a new Selection object
+// containing these matched elements.
+func (s *Selection) FindSelection(sel *Selection) *Selection {
+ if sel == nil {
+ return pushStack(s, nil)
+ }
+ return s.FindNodes(sel.Nodes...)
+}
+
+// FindNodes gets the descendants of each element in the current
+// Selection, filtered by some nodes. It returns a new Selection object
+// containing these matched elements.
+func (s *Selection) FindNodes(nodes ...*html.Node) *Selection {
+ return pushStack(s, mapNodes(nodes, func(i int, n *html.Node) []*html.Node {
+ if sliceContains(s.Nodes, n) {
+ return []*html.Node{n}
+ }
+ return nil
+ }))
+}
+
+// Contents gets the children of each element in the Selection,
+// including text and comment nodes. It returns a new Selection object
+// containing these elements.
+func (s *Selection) Contents() *Selection {
+ return pushStack(s, getChildrenNodes(s.Nodes, siblingAllIncludingNonElements))
+}
+
+// ContentsFiltered gets the children of each element in the Selection,
+// filtered by the specified selector. It returns a new Selection
+// object containing these elements. Since selectors only act on Element nodes,
+// this function is an alias to ChildrenFiltered unless the selector is empty,
+// in which case it is an alias to Contents.
+func (s *Selection) ContentsFiltered(selector string) *Selection {
+ if selector != "" {
+ return s.ChildrenFiltered(selector)
+ }
+ return s.Contents()
+}
+
+// ContentsMatcher gets the children of each element in the Selection,
+// filtered by the specified matcher. It returns a new Selection
+// object containing these elements. Since matchers only act on Element nodes,
+// this function is an alias to ChildrenMatcher.
+func (s *Selection) ContentsMatcher(m Matcher) *Selection {
+ return s.ChildrenMatcher(m)
+}
+
+// Children gets the child elements of each element in the Selection.
+// It returns a new Selection object containing these elements.
+func (s *Selection) Children() *Selection {
+ return pushStack(s, getChildrenNodes(s.Nodes, siblingAll))
+}
+
+// ChildrenFiltered gets the child elements of each element in the Selection,
+// filtered by the specified selector. It returns a new
+// Selection object containing these elements.
+func (s *Selection) ChildrenFiltered(selector string) *Selection {
+ return filterAndPush(s, getChildrenNodes(s.Nodes, siblingAll), compileMatcher(selector))
+}
+
+// ChildrenMatcher gets the child elements of each element in the Selection,
+// filtered by the specified matcher. It returns a new
+// Selection object containing these elements.
+func (s *Selection) ChildrenMatcher(m Matcher) *Selection {
+ return filterAndPush(s, getChildrenNodes(s.Nodes, siblingAll), m)
+}
+
+// Parent gets the parent of each element in the Selection. It returns a
+// new Selection object containing the matched elements.
+func (s *Selection) Parent() *Selection {
+ return pushStack(s, getParentNodes(s.Nodes))
+}
+
+// ParentFiltered gets the parent of each element in the Selection filtered by a
+// selector. It returns a new Selection object containing the matched elements.
+func (s *Selection) ParentFiltered(selector string) *Selection {
+ return filterAndPush(s, getParentNodes(s.Nodes), compileMatcher(selector))
+}
+
+// ParentMatcher gets the parent of each element in the Selection filtered by a
+// matcher. It returns a new Selection object containing the matched elements.
+func (s *Selection) ParentMatcher(m Matcher) *Selection {
+ return filterAndPush(s, getParentNodes(s.Nodes), m)
+}
+
+// Closest gets the first element that matches the selector by testing the
+// element itself and traversing up through its ancestors in the DOM tree.
+func (s *Selection) Closest(selector string) *Selection {
+ cs := compileMatcher(selector)
+ return s.ClosestMatcher(cs)
+}
+
+// ClosestMatcher gets the first element that matches the matcher by testing the
+// element itself and traversing up through its ancestors in the DOM tree.
+func (s *Selection) ClosestMatcher(m Matcher) *Selection {
+ return pushStack(s, mapNodes(s.Nodes, func(i int, n *html.Node) []*html.Node {
+ // For each node in the selection, test the node itself, then each parent
+ // until a match is found.
+ for ; n != nil; n = n.Parent {
+ if m.Match(n) {
+ return []*html.Node{n}
+ }
+ }
+ return nil
+ }))
+}
+
+// ClosestNodes gets the first element that matches one of the nodes by testing the
+// element itself and traversing up through its ancestors in the DOM tree.
+func (s *Selection) ClosestNodes(nodes ...*html.Node) *Selection {
+ set := make(map[*html.Node]bool)
+ for _, n := range nodes {
+ set[n] = true
+ }
+ return pushStack(s, mapNodes(s.Nodes, func(i int, n *html.Node) []*html.Node {
+ // For each node in the selection, test the node itself, then each parent
+ // until a match is found.
+ for ; n != nil; n = n.Parent {
+ if set[n] {
+ return []*html.Node{n}
+ }
+ }
+ return nil
+ }))
+}
+
+// ClosestSelection gets the first element that matches one of the nodes in the
+// Selection by testing the element itself and traversing up through its ancestors
+// in the DOM tree.
+func (s *Selection) ClosestSelection(sel *Selection) *Selection {
+ if sel == nil {
+ return pushStack(s, nil)
+ }
+ return s.ClosestNodes(sel.Nodes...)
+}
+
+// Parents gets the ancestors of each element in the current Selection. It
+// returns a new Selection object with the matched elements.
+func (s *Selection) Parents() *Selection {
+ return pushStack(s, getParentsNodes(s.Nodes, nil, nil))
+}
+
+// ParentsFiltered gets the ancestors of each element in the current
+// Selection. It returns a new Selection object with the matched elements.
+func (s *Selection) ParentsFiltered(selector string) *Selection {
+ return filterAndPush(s, getParentsNodes(s.Nodes, nil, nil), compileMatcher(selector))
+}
+
+// ParentsMatcher gets the ancestors of each element in the current
+// Selection. It returns a new Selection object with the matched elements.
+func (s *Selection) ParentsMatcher(m Matcher) *Selection {
+ return filterAndPush(s, getParentsNodes(s.Nodes, nil, nil), m)
+}
+
+// ParentsUntil gets the ancestors of each element in the Selection, up to but
+// not including the element matched by the selector. It returns a new Selection
+// object containing the matched elements.
+func (s *Selection) ParentsUntil(selector string) *Selection {
+ return pushStack(s, getParentsNodes(s.Nodes, compileMatcher(selector), nil))
+}
+
+// ParentsUntilMatcher gets the ancestors of each element in the Selection, up to but
+// not including the element matched by the matcher. It returns a new Selection
+// object containing the matched elements.
+func (s *Selection) ParentsUntilMatcher(m Matcher) *Selection {
+ return pushStack(s, getParentsNodes(s.Nodes, m, nil))
+}
+
+// ParentsUntilSelection gets the ancestors of each element in the Selection,
+// up to but not including the elements in the specified Selection. It returns a
+// new Selection object containing the matched elements.
+func (s *Selection) ParentsUntilSelection(sel *Selection) *Selection {
+ if sel == nil {
+ return s.Parents()
+ }
+ return s.ParentsUntilNodes(sel.Nodes...)
+}
+
+// ParentsUntilNodes gets the ancestors of each element in the Selection,
+// up to but not including the specified nodes. It returns a
+// new Selection object containing the matched elements.
+func (s *Selection) ParentsUntilNodes(nodes ...*html.Node) *Selection {
+ return pushStack(s, getParentsNodes(s.Nodes, nil, nodes))
+}
+
+// ParentsFilteredUntil is like ParentsUntil, with the option to filter the
+// results based on a selector string. It returns a new Selection
+// object containing the matched elements.
+func (s *Selection) ParentsFilteredUntil(filterSelector, untilSelector string) *Selection {
+ return filterAndPush(s, getParentsNodes(s.Nodes, compileMatcher(untilSelector), nil), compileMatcher(filterSelector))
+}
+
+// ParentsFilteredUntilMatcher is like ParentsUntilMatcher, with the option to filter the
+// results based on a matcher. It returns a new Selection object containing the matched elements.
+func (s *Selection) ParentsFilteredUntilMatcher(filter, until Matcher) *Selection {
+ return filterAndPush(s, getParentsNodes(s.Nodes, until, nil), filter)
+}
+
+// ParentsFilteredUntilSelection is like ParentsUntilSelection, with the
+// option to filter the results based on a selector string. It returns a new
+// Selection object containing the matched elements.
+func (s *Selection) ParentsFilteredUntilSelection(filterSelector string, sel *Selection) *Selection {
+ return s.ParentsMatcherUntilSelection(compileMatcher(filterSelector), sel)
+}
+
+// ParentsMatcherUntilSelection is like ParentsUntilSelection, with the
+// option to filter the results based on a matcher. It returns a new
+// Selection object containing the matched elements.
+func (s *Selection) ParentsMatcherUntilSelection(filter Matcher, sel *Selection) *Selection {
+ if sel == nil {
+ return s.ParentsMatcher(filter)
+ }
+ return s.ParentsMatcherUntilNodes(filter, sel.Nodes...)
+}
+
+// ParentsFilteredUntilNodes is like ParentsUntilNodes, with the
+// option to filter the results based on a selector string. It returns a new
+// Selection object containing the matched elements.
+func (s *Selection) ParentsFilteredUntilNodes(filterSelector string, nodes ...*html.Node) *Selection {
+ return filterAndPush(s, getParentsNodes(s.Nodes, nil, nodes), compileMatcher(filterSelector))
+}
+
+// ParentsMatcherUntilNodes is like ParentsUntilNodes, with the
+// option to filter the results based on a matcher. It returns a new
+// Selection object containing the matched elements.
+func (s *Selection) ParentsMatcherUntilNodes(filter Matcher, nodes ...*html.Node) *Selection {
+ return filterAndPush(s, getParentsNodes(s.Nodes, nil, nodes), filter)
+}
+
+// Siblings gets the siblings of each element in the Selection. It returns
+// a new Selection object containing the matched elements.
+func (s *Selection) Siblings() *Selection {
+ return pushStack(s, getSiblingNodes(s.Nodes, siblingAll, nil, nil))
+}
+
+// SiblingsFiltered gets the siblings of each element in the Selection
+// filtered by a selector. It returns a new Selection object containing the
+// matched elements.
+func (s *Selection) SiblingsFiltered(selector string) *Selection {
+ return filterAndPush(s, getSiblingNodes(s.Nodes, siblingAll, nil, nil), compileMatcher(selector))
+}
+
+// SiblingsMatcher gets the siblings of each element in the Selection
+// filtered by a matcher. It returns a new Selection object containing the
+// matched elements.
+func (s *Selection) SiblingsMatcher(m Matcher) *Selection {
+ return filterAndPush(s, getSiblingNodes(s.Nodes, siblingAll, nil, nil), m)
+}
+
+// Next gets the immediately following sibling of each element in the
+// Selection. It returns a new Selection object containing the matched elements.
+func (s *Selection) Next() *Selection {
+ return pushStack(s, getSiblingNodes(s.Nodes, siblingNext, nil, nil))
+}
+
+// NextFiltered gets the immediately following sibling of each element in the
+// Selection filtered by a selector. It returns a new Selection object
+// containing the matched elements.
+func (s *Selection) NextFiltered(selector string) *Selection {
+ return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNext, nil, nil), compileMatcher(selector))
+}
+
+// NextMatcher gets the immediately following sibling of each element in the
+// Selection filtered by a matcher. It returns a new Selection object
+// containing the matched elements.
+func (s *Selection) NextMatcher(m Matcher) *Selection {
+ return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNext, nil, nil), m)
+}
+
+// NextAll gets all the following siblings of each element in the
+// Selection. It returns a new Selection object containing the matched elements.
+func (s *Selection) NextAll() *Selection {
+ return pushStack(s, getSiblingNodes(s.Nodes, siblingNextAll, nil, nil))
+}
+
+// NextAllFiltered gets all the following siblings of each element in the
+// Selection filtered by a selector. It returns a new Selection object
+// containing the matched elements.
+func (s *Selection) NextAllFiltered(selector string) *Selection {
+ return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextAll, nil, nil), compileMatcher(selector))
+}
+
+// NextAllMatcher gets all the following siblings of each element in the
+// Selection filtered by a matcher. It returns a new Selection object
+// containing the matched elements.
+func (s *Selection) NextAllMatcher(m Matcher) *Selection {
+ return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextAll, nil, nil), m)
+}
+
+// Prev gets the immediately preceding sibling of each element in the
+// Selection. It returns a new Selection object containing the matched elements.
+func (s *Selection) Prev() *Selection {
+ return pushStack(s, getSiblingNodes(s.Nodes, siblingPrev, nil, nil))
+}
+
+// PrevFiltered gets the immediately preceding sibling of each element in the
+// Selection filtered by a selector. It returns a new Selection object
+// containing the matched elements.
+func (s *Selection) PrevFiltered(selector string) *Selection {
+ return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrev, nil, nil), compileMatcher(selector))
+}
+
+// PrevMatcher gets the immediately preceding sibling of each element in the
+// Selection filtered by a matcher. It returns a new Selection object
+// containing the matched elements.
+func (s *Selection) PrevMatcher(m Matcher) *Selection {
+ return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrev, nil, nil), m)
+}
+
+// PrevAll gets all the preceding siblings of each element in the
+// Selection. It returns a new Selection object containing the matched elements.
+func (s *Selection) PrevAll() *Selection {
+ return pushStack(s, getSiblingNodes(s.Nodes, siblingPrevAll, nil, nil))
+}
+
+// PrevAllFiltered gets all the preceding siblings of each element in the
+// Selection filtered by a selector. It returns a new Selection object
+// containing the matched elements.
+func (s *Selection) PrevAllFiltered(selector string) *Selection {
+ return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevAll, nil, nil), compileMatcher(selector))
+}
+
+// PrevAllMatcher gets all the preceding siblings of each element in the
+// Selection filtered by a matcher. It returns a new Selection object
+// containing the matched elements.
+func (s *Selection) PrevAllMatcher(m Matcher) *Selection {
+ return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevAll, nil, nil), m)
+}
+
+// NextUntil gets all following siblings of each element up to but not
+// including the element matched by the selector. It returns a new Selection
+// object containing the matched elements.
+func (s *Selection) NextUntil(selector string) *Selection {
+ return pushStack(s, getSiblingNodes(s.Nodes, siblingNextUntil,
+ compileMatcher(selector), nil))
+}
+
+// NextUntilMatcher gets all following siblings of each element up to but not
+// including the element matched by the matcher. It returns a new Selection
+// object containing the matched elements.
+func (s *Selection) NextUntilMatcher(m Matcher) *Selection {
+ return pushStack(s, getSiblingNodes(s.Nodes, siblingNextUntil,
+ m, nil))
+}
+
+// NextUntilSelection gets all following siblings of each element up to but not
+// including the element matched by the Selection. It returns a new Selection
+// object containing the matched elements.
+func (s *Selection) NextUntilSelection(sel *Selection) *Selection {
+ if sel == nil {
+ return s.NextAll()
+ }
+ return s.NextUntilNodes(sel.Nodes...)
+}
+
+// NextUntilNodes gets all following siblings of each element up to but not
+// including the element matched by the nodes. It returns a new Selection
+// object containing the matched elements.
+func (s *Selection) NextUntilNodes(nodes ...*html.Node) *Selection {
+ return pushStack(s, getSiblingNodes(s.Nodes, siblingNextUntil,
+ nil, nodes))
+}
+
+// PrevUntil gets all preceding siblings of each element up to but not
+// including the element matched by the selector. It returns a new Selection
+// object containing the matched elements.
+func (s *Selection) PrevUntil(selector string) *Selection {
+ return pushStack(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
+ compileMatcher(selector), nil))
+}
+
+// PrevUntilMatcher gets all preceding siblings of each element up to but not
+// including the element matched by the matcher. It returns a new Selection
+// object containing the matched elements.
+func (s *Selection) PrevUntilMatcher(m Matcher) *Selection {
+ return pushStack(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
+ m, nil))
+}
+
+// PrevUntilSelection gets all preceding siblings of each element up to but not
+// including the element matched by the Selection. It returns a new Selection
+// object containing the matched elements.
+func (s *Selection) PrevUntilSelection(sel *Selection) *Selection {
+ if sel == nil {
+ return s.PrevAll()
+ }
+ return s.PrevUntilNodes(sel.Nodes...)
+}
+
+// PrevUntilNodes gets all preceding siblings of each element up to but not
+// including the element matched by the nodes. It returns a new Selection
+// object containing the matched elements.
+func (s *Selection) PrevUntilNodes(nodes ...*html.Node) *Selection {
+ return pushStack(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
+ nil, nodes))
+}
+
+// NextFilteredUntil is like NextUntil, with the option to filter
+// the results based on a selector string.
+// It returns a new Selection object containing the matched elements.
+func (s *Selection) NextFilteredUntil(filterSelector, untilSelector string) *Selection {
+ return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextUntil,
+ compileMatcher(untilSelector), nil), compileMatcher(filterSelector))
+}
+
+// NextFilteredUntilMatcher is like NextUntilMatcher, with the option to filter
+// the results based on a matcher.
+// It returns a new Selection object containing the matched elements.
+func (s *Selection) NextFilteredUntilMatcher(filter, until Matcher) *Selection {
+ return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextUntil,
+ until, nil), filter)
+}
+
+// NextFilteredUntilSelection is like NextUntilSelection, with the
+// option to filter the results based on a selector string. It returns a new
+// Selection object containing the matched elements.
+func (s *Selection) NextFilteredUntilSelection(filterSelector string, sel *Selection) *Selection {
+ return s.NextMatcherUntilSelection(compileMatcher(filterSelector), sel)
+}
+
+// NextMatcherUntilSelection is like NextUntilSelection, with the
+// option to filter the results based on a matcher. It returns a new
+// Selection object containing the matched elements.
+func (s *Selection) NextMatcherUntilSelection(filter Matcher, sel *Selection) *Selection {
+ if sel == nil {
+ return s.NextMatcher(filter)
+ }
+ return s.NextMatcherUntilNodes(filter, sel.Nodes...)
+}
+
+// NextFilteredUntilNodes is like NextUntilNodes, with the
+// option to filter the results based on a selector string. It returns a new
+// Selection object containing the matched elements.
+func (s *Selection) NextFilteredUntilNodes(filterSelector string, nodes ...*html.Node) *Selection {
+ return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextUntil,
+ nil, nodes), compileMatcher(filterSelector))
+}
+
+// NextMatcherUntilNodes is like NextUntilNodes, with the
+// option to filter the results based on a matcher. It returns a new
+// Selection object containing the matched elements.
+func (s *Selection) NextMatcherUntilNodes(filter Matcher, nodes ...*html.Node) *Selection {
+ return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextUntil,
+ nil, nodes), filter)
+}
+
+// PrevFilteredUntil is like PrevUntil, with the option to filter
+// the results based on a selector string.
+// It returns a new Selection object containing the matched elements.
+func (s *Selection) PrevFilteredUntil(filterSelector, untilSelector string) *Selection {
+ return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
+ compileMatcher(untilSelector), nil), compileMatcher(filterSelector))
+}
+
+// PrevFilteredUntilMatcher is like PrevUntilMatcher, with the option to filter
+// the results based on a matcher.
+// It returns a new Selection object containing the matched elements.
+func (s *Selection) PrevFilteredUntilMatcher(filter, until Matcher) *Selection {
+ return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
+ until, nil), filter)
+}
+
+// PrevFilteredUntilSelection is like PrevUntilSelection, with the
+// option to filter the results based on a selector string. It returns a new
+// Selection object containing the matched elements.
+func (s *Selection) PrevFilteredUntilSelection(filterSelector string, sel *Selection) *Selection {
+ return s.PrevMatcherUntilSelection(compileMatcher(filterSelector), sel)
+}
+
+// PrevMatcherUntilSelection is like PrevUntilSelection, with the
+// option to filter the results based on a matcher. It returns a new
+// Selection object containing the matched elements.
+func (s *Selection) PrevMatcherUntilSelection(filter Matcher, sel *Selection) *Selection {
+ if sel == nil {
+ return s.PrevMatcher(filter)
+ }
+ return s.PrevMatcherUntilNodes(filter, sel.Nodes...)
+}
+
+// PrevFilteredUntilNodes is like PrevUntilNodes, with the
+// option to filter the results based on a selector string. It returns a new
+// Selection object containing the matched elements.
+func (s *Selection) PrevFilteredUntilNodes(filterSelector string, nodes ...*html.Node) *Selection {
+ return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
+ nil, nodes), compileMatcher(filterSelector))
+}
+
+// PrevMatcherUntilNodes is like PrevUntilNodes, with the
+// option to filter the results based on a matcher. It returns a new
+// Selection object containing the matched elements.
+func (s *Selection) PrevMatcherUntilNodes(filter Matcher, nodes ...*html.Node) *Selection {
+ return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
+ nil, nodes), filter)
+}
+
+// filterAndPush filters the nodes based on a matcher, and pushes the results
+// on the stack, with the srcSel as previous selection.
+func filterAndPush(srcSel *Selection, nodes []*html.Node, m Matcher) *Selection {
+ // Create a temporary Selection with the specified nodes to filter using winnow
+ sel := &Selection{nodes, srcSel.document, nil}
+ // Filter based on matcher and push on stack
+ return pushStack(srcSel, winnow(sel, m, true))
+}
+
+// Internal implementation of Find that return raw nodes.
+func findWithMatcher(nodes []*html.Node, m Matcher) []*html.Node {
+ // Map nodes to find the matches within the children of each node
+ return mapNodes(nodes, func(i int, n *html.Node) (result []*html.Node) {
+	// Go down one level, because jQuery's Find selects only within descendants
+ for c := n.FirstChild; c != nil; c = c.NextSibling {
+ if c.Type == html.ElementNode {
+ result = append(result, m.MatchAll(c)...)
+ }
+ }
+ return
+ })
+}
+
+// Internal implementation to get all parent nodes, stopping at the specified
+// node (or nil if no stop).
+func getParentsNodes(nodes []*html.Node, stopm Matcher, stopNodes []*html.Node) []*html.Node {
+ return mapNodes(nodes, func(i int, n *html.Node) (result []*html.Node) {
+ for p := n.Parent; p != nil; p = p.Parent {
+ sel := newSingleSelection(p, nil)
+ if stopm != nil {
+ if sel.IsMatcher(stopm) {
+ break
+ }
+ } else if len(stopNodes) > 0 {
+ if sel.IsNodes(stopNodes...) {
+ break
+ }
+ }
+ if p.Type == html.ElementNode {
+ result = append(result, p)
+ }
+ }
+ return
+ })
+}
+
+// Internal implementation of sibling nodes that return a raw slice of matches.
+func getSiblingNodes(nodes []*html.Node, st siblingType, untilm Matcher, untilNodes []*html.Node) []*html.Node {
+ var f func(*html.Node) bool
+
+ // If the requested siblings are ...Until, create the test function to
+ // determine if the until condition is reached (returns true if it is)
+ if st == siblingNextUntil || st == siblingPrevUntil {
+ f = func(n *html.Node) bool {
+ if untilm != nil {
+ // Matcher-based condition
+ sel := newSingleSelection(n, nil)
+ return sel.IsMatcher(untilm)
+ } else if len(untilNodes) > 0 {
+ // Nodes-based condition
+ sel := newSingleSelection(n, nil)
+ return sel.IsNodes(untilNodes...)
+ }
+ return false
+ }
+ }
+
+ return mapNodes(nodes, func(i int, n *html.Node) []*html.Node {
+ return getChildrenWithSiblingType(n.Parent, st, n, f)
+ })
+}
+
+// Gets the children nodes of each node in the specified slice of nodes,
+// based on the sibling type request.
+func getChildrenNodes(nodes []*html.Node, st siblingType) []*html.Node {
+ return mapNodes(nodes, func(i int, n *html.Node) []*html.Node {
+ return getChildrenWithSiblingType(n, st, nil, nil)
+ })
+}
+
+// Gets the children of the specified parent, based on the requested sibling
+// type, skipping a specified node if required.
+func getChildrenWithSiblingType(parent *html.Node, st siblingType, skipNode *html.Node,
+ untilFunc func(*html.Node) bool) (result []*html.Node) {
+
+ // Create the iterator function
+ var iter = func(cur *html.Node) (ret *html.Node) {
+ // Based on the sibling type requested, iterate the right way
+ for {
+ switch st {
+ case siblingAll, siblingAllIncludingNonElements:
+ if cur == nil {
+ // First iteration, start with first child of parent
+ // Skip node if required
+ if ret = parent.FirstChild; ret == skipNode && skipNode != nil {
+ ret = skipNode.NextSibling
+ }
+ } else {
+ // Skip node if required
+ if ret = cur.NextSibling; ret == skipNode && skipNode != nil {
+ ret = skipNode.NextSibling
+ }
+ }
+ case siblingPrev, siblingPrevAll, siblingPrevUntil:
+ if cur == nil {
+ // Start with previous sibling of the skip node
+ ret = skipNode.PrevSibling
+ } else {
+ ret = cur.PrevSibling
+ }
+ case siblingNext, siblingNextAll, siblingNextUntil:
+ if cur == nil {
+ // Start with next sibling of the skip node
+ ret = skipNode.NextSibling
+ } else {
+ ret = cur.NextSibling
+ }
+ default:
+ panic("Invalid sibling type.")
+ }
+ if ret == nil || ret.Type == html.ElementNode || st == siblingAllIncludingNonElements {
+ return
+ }
+ // Not a valid node, try again from this one
+ cur = ret
+ }
+ }
+
+ for c := iter(nil); c != nil; c = iter(c) {
+ // If this is an ...Until case, test before append (returns true
+ // if the until condition is reached)
+ if st == siblingNextUntil || st == siblingPrevUntil {
+ if untilFunc(c) {
+ return
+ }
+ }
+ result = append(result, c)
+ if st == siblingNext || st == siblingPrev {
+ // Only one node was requested (immediate next or previous), so exit
+ return
+ }
+ }
+ return
+}
+
+// Internal implementation of parent nodes that return a raw slice of Nodes.
+func getParentNodes(nodes []*html.Node) []*html.Node {
+ return mapNodes(nodes, func(i int, n *html.Node) []*html.Node {
+ if n.Parent != nil && n.Parent.Type == html.ElementNode {
+ return []*html.Node{n.Parent}
+ }
+ return nil
+ })
+}
+
+// Internal map function used by many traversing methods. Takes the source nodes
+// to iterate on and the mapping function that returns an array of nodes.
+// Returns an array of nodes mapped by calling the callback function once for
+// each node in the source nodes.
+func mapNodes(nodes []*html.Node, f func(int, *html.Node) []*html.Node) (result []*html.Node) {
+ set := make(map[*html.Node]bool)
+ for i, n := range nodes {
+ if vals := f(i, n); len(vals) > 0 {
+ result = appendWithoutDuplicates(result, vals, set)
+ }
+ }
+ return result
+}
diff --git a/vendor/github.com/PuerkitoBio/goquery/traversal_test.go b/vendor/github.com/PuerkitoBio/goquery/traversal_test.go
new file mode 100644
index 0000000..04383a4
--- /dev/null
+++ b/vendor/github.com/PuerkitoBio/goquery/traversal_test.go
@@ -0,0 +1,793 @@
+package goquery
+
+import (
+ "strings"
+ "testing"
+)
+
+func TestFind(t *testing.T) {
+ sel := Doc().Find("div.row-fluid")
+ assertLength(t, sel.Nodes, 9)
+}
+
+func TestFindRollback(t *testing.T) {
+ sel := Doc().Find("div.row-fluid")
+ sel2 := sel.Find("a").End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestFindNotSelf(t *testing.T) {
+ sel := Doc().Find("h1").Find("h1")
+ assertLength(t, sel.Nodes, 0)
+}
+
+func TestFindInvalid(t *testing.T) {
+ sel := Doc().Find(":+ ^")
+ assertLength(t, sel.Nodes, 0)
+}
+
+func TestFindBig(t *testing.T) {
+ doc := DocW()
+ sel := doc.Find("li")
+ assertLength(t, sel.Nodes, 373)
+ sel2 := doc.Find("span")
+ assertLength(t, sel2.Nodes, 448)
+ sel3 := sel.FindSelection(sel2)
+ assertLength(t, sel3.Nodes, 248)
+}
+
+func TestChainedFind(t *testing.T) {
+ sel := Doc().Find("div.hero-unit").Find(".row-fluid")
+ assertLength(t, sel.Nodes, 4)
+}
+
+func TestChainedFindInvalid(t *testing.T) {
+ sel := Doc().Find("div.hero-unit").Find("")
+ assertLength(t, sel.Nodes, 0)
+}
+
+func TestChildren(t *testing.T) {
+ sel := Doc().Find(".pvk-content").Children()
+ assertLength(t, sel.Nodes, 5)
+}
+
+func TestChildrenRollback(t *testing.T) {
+ sel := Doc().Find(".pvk-content")
+ sel2 := sel.Children().End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestContents(t *testing.T) {
+ sel := Doc().Find(".pvk-content").Contents()
+ assertLength(t, sel.Nodes, 13)
+}
+
+func TestContentsRollback(t *testing.T) {
+ sel := Doc().Find(".pvk-content")
+ sel2 := sel.Contents().End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestChildrenFiltered(t *testing.T) {
+ sel := Doc().Find(".pvk-content").ChildrenFiltered(".hero-unit")
+ assertLength(t, sel.Nodes, 1)
+}
+
+func TestChildrenFilteredInvalid(t *testing.T) {
+ sel := Doc().Find(".pvk-content").ChildrenFiltered("")
+ assertLength(t, sel.Nodes, 0)
+}
+
+func TestChildrenFilteredRollback(t *testing.T) {
+ sel := Doc().Find(".pvk-content")
+ sel2 := sel.ChildrenFiltered(".hero-unit").End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestContentsFiltered(t *testing.T) {
+ sel := Doc().Find(".pvk-content").ContentsFiltered(".hero-unit")
+ assertLength(t, sel.Nodes, 1)
+}
+
+func TestContentsFilteredInvalid(t *testing.T) {
+ sel := Doc().Find(".pvk-content").ContentsFiltered("~")
+ assertLength(t, sel.Nodes, 0)
+}
+
+func TestContentsFilteredRollback(t *testing.T) {
+ sel := Doc().Find(".pvk-content")
+ sel2 := sel.ContentsFiltered(".hero-unit").End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestChildrenFilteredNone(t *testing.T) {
+ sel := Doc().Find(".pvk-content").ChildrenFiltered("a.btn")
+ assertLength(t, sel.Nodes, 0)
+}
+
+func TestParent(t *testing.T) {
+ sel := Doc().Find(".container-fluid").Parent()
+ assertLength(t, sel.Nodes, 3)
+}
+
+func TestParentRollback(t *testing.T) {
+ sel := Doc().Find(".container-fluid")
+ sel2 := sel.Parent().End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestParentBody(t *testing.T) {
+ sel := Doc().Find("body").Parent()
+ assertLength(t, sel.Nodes, 1)
+}
+
+func TestParentFiltered(t *testing.T) {
+ sel := Doc().Find(".container-fluid").ParentFiltered(".hero-unit")
+ assertLength(t, sel.Nodes, 1)
+ assertClass(t, sel, "hero-unit")
+}
+
+func TestParentFilteredInvalid(t *testing.T) {
+ sel := Doc().Find(".container-fluid").ParentFiltered("")
+ assertLength(t, sel.Nodes, 0)
+}
+
+func TestParentFilteredRollback(t *testing.T) {
+ sel := Doc().Find(".container-fluid")
+ sel2 := sel.ParentFiltered(".hero-unit").End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestParents(t *testing.T) {
+ sel := Doc().Find(".container-fluid").Parents()
+ assertLength(t, sel.Nodes, 8)
+}
+
+func TestParentsOrder(t *testing.T) {
+ sel := Doc().Find("#cf2").Parents()
+ assertLength(t, sel.Nodes, 6)
+ assertSelectionIs(t, sel, ".hero-unit", ".pvk-content", "div.row-fluid", "#cf1", "body", "html")
+}
+
+func TestParentsRollback(t *testing.T) {
+ sel := Doc().Find(".container-fluid")
+ sel2 := sel.Parents().End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestParentsFiltered(t *testing.T) {
+ sel := Doc().Find(".container-fluid").ParentsFiltered("body")
+ assertLength(t, sel.Nodes, 1)
+}
+
+func TestParentsFilteredInvalid(t *testing.T) {
+ sel := Doc().Find(".container-fluid").ParentsFiltered("")
+ assertLength(t, sel.Nodes, 0)
+}
+
+func TestParentsFilteredRollback(t *testing.T) {
+ sel := Doc().Find(".container-fluid")
+ sel2 := sel.ParentsFiltered("body").End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestParentsUntil(t *testing.T) {
+ sel := Doc().Find(".container-fluid").ParentsUntil("body")
+ assertLength(t, sel.Nodes, 6)
+}
+
+func TestParentsUntilInvalid(t *testing.T) {
+ sel := Doc().Find(".container-fluid").ParentsUntil("")
+ assertLength(t, sel.Nodes, 8)
+}
+
+func TestParentsUntilRollback(t *testing.T) {
+ sel := Doc().Find(".container-fluid")
+ sel2 := sel.ParentsUntil("body").End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestParentsUntilSelection(t *testing.T) {
+ sel := Doc().Find(".container-fluid")
+ sel2 := Doc().Find(".pvk-content")
+ sel = sel.ParentsUntilSelection(sel2)
+ assertLength(t, sel.Nodes, 3)
+}
+
+func TestParentsUntilSelectionRollback(t *testing.T) {
+ sel := Doc().Find(".container-fluid")
+ sel2 := Doc().Find(".pvk-content")
+ sel2 = sel.ParentsUntilSelection(sel2).End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestParentsUntilNodes(t *testing.T) {
+ sel := Doc().Find(".container-fluid")
+ sel2 := Doc().Find(".pvk-content, .hero-unit")
+ sel = sel.ParentsUntilNodes(sel2.Nodes...)
+ assertLength(t, sel.Nodes, 2)
+}
+
+func TestParentsUntilNodesRollback(t *testing.T) {
+ sel := Doc().Find(".container-fluid")
+ sel2 := Doc().Find(".pvk-content, .hero-unit")
+ sel2 = sel.ParentsUntilNodes(sel2.Nodes...).End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestParentsFilteredUntil(t *testing.T) {
+ sel := Doc().Find(".container-fluid").ParentsFilteredUntil(".pvk-content", "body")
+ assertLength(t, sel.Nodes, 2)
+}
+
+func TestParentsFilteredUntilInvalid(t *testing.T) {
+ sel := Doc().Find(".container-fluid").ParentsFilteredUntil("", "")
+ assertLength(t, sel.Nodes, 0)
+}
+
+func TestParentsFilteredUntilRollback(t *testing.T) {
+ sel := Doc().Find(".container-fluid")
+ sel2 := sel.ParentsFilteredUntil(".pvk-content", "body").End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestParentsFilteredUntilSelection(t *testing.T) {
+ sel := Doc().Find(".container-fluid")
+ sel2 := Doc().Find(".row-fluid")
+ sel = sel.ParentsFilteredUntilSelection("div", sel2)
+ assertLength(t, sel.Nodes, 3)
+}
+
+func TestParentsFilteredUntilSelectionRollback(t *testing.T) {
+ sel := Doc().Find(".container-fluid")
+ sel2 := Doc().Find(".row-fluid")
+ sel2 = sel.ParentsFilteredUntilSelection("div", sel2).End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestParentsFilteredUntilNodes(t *testing.T) {
+ sel := Doc().Find(".container-fluid")
+ sel2 := Doc().Find(".row-fluid")
+ sel = sel.ParentsFilteredUntilNodes("body", sel2.Nodes...)
+ assertLength(t, sel.Nodes, 1)
+}
+
+func TestParentsFilteredUntilNodesRollback(t *testing.T) {
+ sel := Doc().Find(".container-fluid")
+ sel2 := Doc().Find(".row-fluid")
+ sel2 = sel.ParentsFilteredUntilNodes("body", sel2.Nodes...).End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestSiblings(t *testing.T) {
+ sel := Doc().Find("h1").Siblings()
+ assertLength(t, sel.Nodes, 1)
+}
+
+func TestSiblingsRollback(t *testing.T) {
+ sel := Doc().Find("h1")
+ sel2 := sel.Siblings().End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestSiblings2(t *testing.T) {
+ sel := Doc().Find(".pvk-gutter").Siblings()
+ assertLength(t, sel.Nodes, 9)
+}
+
+func TestSiblings3(t *testing.T) {
+ sel := Doc().Find("body>.container-fluid").Siblings()
+ assertLength(t, sel.Nodes, 0)
+}
+
+func TestSiblingsFiltered(t *testing.T) {
+ sel := Doc().Find(".pvk-gutter").SiblingsFiltered(".pvk-content")
+ assertLength(t, sel.Nodes, 3)
+}
+
+func TestSiblingsFilteredInvalid(t *testing.T) {
+ sel := Doc().Find(".pvk-gutter").SiblingsFiltered("")
+ assertLength(t, sel.Nodes, 0)
+}
+
+func TestSiblingsFilteredRollback(t *testing.T) {
+ sel := Doc().Find(".pvk-gutter")
+ sel2 := sel.SiblingsFiltered(".pvk-content").End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestNext(t *testing.T) {
+ sel := Doc().Find("h1").Next()
+ assertLength(t, sel.Nodes, 1)
+}
+
+func TestNextRollback(t *testing.T) {
+ sel := Doc().Find("h1")
+ sel2 := sel.Next().End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestNext2(t *testing.T) {
+ sel := Doc().Find(".close").Next()
+ assertLength(t, sel.Nodes, 1)
+}
+
+func TestNextNone(t *testing.T) {
+ sel := Doc().Find("small").Next()
+ assertLength(t, sel.Nodes, 0)
+}
+
+func TestNextFiltered(t *testing.T) {
+ sel := Doc().Find(".container-fluid").NextFiltered("div")
+ assertLength(t, sel.Nodes, 2)
+}
+
+func TestNextFilteredInvalid(t *testing.T) {
+ sel := Doc().Find(".container-fluid").NextFiltered("")
+ assertLength(t, sel.Nodes, 0)
+}
+
+func TestNextFilteredRollback(t *testing.T) {
+ sel := Doc().Find(".container-fluid")
+ sel2 := sel.NextFiltered("div").End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestNextFiltered2(t *testing.T) {
+ sel := Doc().Find(".container-fluid").NextFiltered("[ng-view]")
+ assertLength(t, sel.Nodes, 1)
+}
+
+func TestPrev(t *testing.T) {
+ sel := Doc().Find(".red").Prev()
+ assertLength(t, sel.Nodes, 1)
+ assertClass(t, sel, "green")
+}
+
+func TestPrevRollback(t *testing.T) {
+ sel := Doc().Find(".red")
+ sel2 := sel.Prev().End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestPrev2(t *testing.T) {
+ sel := Doc().Find(".row-fluid").Prev()
+ assertLength(t, sel.Nodes, 5)
+}
+
+func TestPrevNone(t *testing.T) {
+ sel := Doc().Find("h2").Prev()
+ assertLength(t, sel.Nodes, 0)
+}
+
+func TestPrevFiltered(t *testing.T) {
+ sel := Doc().Find(".row-fluid").PrevFiltered(".row-fluid")
+ assertLength(t, sel.Nodes, 5)
+}
+
+func TestPrevFilteredInvalid(t *testing.T) {
+ sel := Doc().Find(".row-fluid").PrevFiltered("")
+ assertLength(t, sel.Nodes, 0)
+}
+
+func TestPrevFilteredRollback(t *testing.T) {
+ sel := Doc().Find(".row-fluid")
+ sel2 := sel.PrevFiltered(".row-fluid").End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestNextAll(t *testing.T) {
+ sel := Doc().Find("#cf2 div:nth-child(1)").NextAll()
+ assertLength(t, sel.Nodes, 3)
+}
+
+func TestNextAllRollback(t *testing.T) {
+ sel := Doc().Find("#cf2 div:nth-child(1)")
+ sel2 := sel.NextAll().End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestNextAll2(t *testing.T) {
+ sel := Doc().Find("div[ng-cloak]").NextAll()
+ assertLength(t, sel.Nodes, 1)
+}
+
+func TestNextAllNone(t *testing.T) {
+ sel := Doc().Find(".footer").NextAll()
+ assertLength(t, sel.Nodes, 0)
+}
+
+func TestNextAllFiltered(t *testing.T) {
+ sel := Doc().Find("#cf2 .row-fluid").NextAllFiltered("[ng-cloak]")
+ assertLength(t, sel.Nodes, 2)
+}
+
+func TestNextAllFilteredInvalid(t *testing.T) {
+ sel := Doc().Find("#cf2 .row-fluid").NextAllFiltered("")
+ assertLength(t, sel.Nodes, 0)
+}
+
+func TestNextAllFilteredRollback(t *testing.T) {
+ sel := Doc().Find("#cf2 .row-fluid")
+ sel2 := sel.NextAllFiltered("[ng-cloak]").End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestNextAllFiltered2(t *testing.T) {
+ sel := Doc().Find(".close").NextAllFiltered("h4")
+ assertLength(t, sel.Nodes, 1)
+}
+
+func TestPrevAll(t *testing.T) {
+ sel := Doc().Find("[ng-view]").PrevAll()
+ assertLength(t, sel.Nodes, 2)
+}
+
+func TestPrevAllOrder(t *testing.T) {
+ sel := Doc().Find("[ng-view]").PrevAll()
+ assertLength(t, sel.Nodes, 2)
+ assertSelectionIs(t, sel, "#cf4", "#cf3")
+}
+
+func TestPrevAllRollback(t *testing.T) {
+ sel := Doc().Find("[ng-view]")
+ sel2 := sel.PrevAll().End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestPrevAll2(t *testing.T) {
+ sel := Doc().Find(".pvk-gutter").PrevAll()
+ assertLength(t, sel.Nodes, 6)
+}
+
+func TestPrevAllFiltered(t *testing.T) {
+ sel := Doc().Find(".pvk-gutter").PrevAllFiltered(".pvk-content")
+ assertLength(t, sel.Nodes, 3)
+}
+
+func TestPrevAllFilteredInvalid(t *testing.T) {
+ sel := Doc().Find(".pvk-gutter").PrevAllFiltered("")
+ assertLength(t, sel.Nodes, 0)
+}
+
+func TestPrevAllFilteredRollback(t *testing.T) {
+ sel := Doc().Find(".pvk-gutter")
+ sel2 := sel.PrevAllFiltered(".pvk-content").End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestNextUntil(t *testing.T) {
+ sel := Doc().Find(".alert a").NextUntil("p")
+ assertLength(t, sel.Nodes, 1)
+ assertSelectionIs(t, sel, "h4")
+}
+
+func TestNextUntilInvalid(t *testing.T) {
+ sel := Doc().Find(".alert a").NextUntil("")
+ assertLength(t, sel.Nodes, 2)
+}
+
+func TestNextUntil2(t *testing.T) {
+ sel := Doc().Find("#cf2-1").NextUntil("[ng-cloak]")
+ assertLength(t, sel.Nodes, 1)
+ assertSelectionIs(t, sel, "#cf2-2")
+}
+
+func TestNextUntilOrder(t *testing.T) {
+ sel := Doc().Find("#cf2-1").NextUntil("#cf2-4")
+ assertLength(t, sel.Nodes, 2)
+ assertSelectionIs(t, sel, "#cf2-2", "#cf2-3")
+}
+
+func TestNextUntilRollback(t *testing.T) {
+ sel := Doc().Find("#cf2-1")
+ sel2 := sel.PrevUntil("#cf2-4").End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestNextUntilSelection(t *testing.T) {
+ sel := Doc2().Find("#n2")
+ sel2 := Doc2().Find("#n4")
+ sel2 = sel.NextUntilSelection(sel2)
+ assertLength(t, sel2.Nodes, 1)
+ assertSelectionIs(t, sel2, "#n3")
+}
+
+func TestNextUntilSelectionRollback(t *testing.T) {
+ sel := Doc2().Find("#n2")
+ sel2 := Doc2().Find("#n4")
+ sel2 = sel.NextUntilSelection(sel2).End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestNextUntilNodes(t *testing.T) {
+ sel := Doc2().Find("#n2")
+ sel2 := Doc2().Find("#n5")
+ sel2 = sel.NextUntilNodes(sel2.Nodes...)
+ assertLength(t, sel2.Nodes, 2)
+ assertSelectionIs(t, sel2, "#n3", "#n4")
+}
+
+func TestNextUntilNodesRollback(t *testing.T) {
+ sel := Doc2().Find("#n2")
+ sel2 := Doc2().Find("#n5")
+ sel2 = sel.NextUntilNodes(sel2.Nodes...).End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestPrevUntil(t *testing.T) {
+ sel := Doc().Find(".alert p").PrevUntil("a")
+ assertLength(t, sel.Nodes, 1)
+ assertSelectionIs(t, sel, "h4")
+}
+
+func TestPrevUntilInvalid(t *testing.T) {
+ sel := Doc().Find(".alert p").PrevUntil("")
+ assertLength(t, sel.Nodes, 2)
+}
+
+func TestPrevUntil2(t *testing.T) {
+ sel := Doc().Find("[ng-cloak]").PrevUntil(":not([ng-cloak])")
+ assertLength(t, sel.Nodes, 1)
+ assertSelectionIs(t, sel, "[ng-cloak]")
+}
+
+func TestPrevUntilOrder(t *testing.T) {
+ sel := Doc().Find("#cf2-4").PrevUntil("#cf2-1")
+ assertLength(t, sel.Nodes, 2)
+ assertSelectionIs(t, sel, "#cf2-3", "#cf2-2")
+}
+
+func TestPrevUntilRollback(t *testing.T) {
+ sel := Doc().Find("#cf2-4")
+ sel2 := sel.PrevUntil("#cf2-1").End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestPrevUntilSelection(t *testing.T) {
+ sel := Doc2().Find("#n4")
+ sel2 := Doc2().Find("#n2")
+ sel2 = sel.PrevUntilSelection(sel2)
+ assertLength(t, sel2.Nodes, 1)
+ assertSelectionIs(t, sel2, "#n3")
+}
+
+func TestPrevUntilSelectionRollback(t *testing.T) {
+ sel := Doc2().Find("#n4")
+ sel2 := Doc2().Find("#n2")
+ sel2 = sel.PrevUntilSelection(sel2).End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestPrevUntilNodes(t *testing.T) {
+ sel := Doc2().Find("#n5")
+ sel2 := Doc2().Find("#n2")
+ sel2 = sel.PrevUntilNodes(sel2.Nodes...)
+ assertLength(t, sel2.Nodes, 2)
+ assertSelectionIs(t, sel2, "#n4", "#n3")
+}
+
+func TestPrevUntilNodesRollback(t *testing.T) {
+ sel := Doc2().Find("#n5")
+ sel2 := Doc2().Find("#n2")
+ sel2 = sel.PrevUntilNodes(sel2.Nodes...).End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestNextFilteredUntil(t *testing.T) {
+ sel := Doc2().Find(".two").NextFilteredUntil(".even", ".six")
+ assertLength(t, sel.Nodes, 4)
+ assertSelectionIs(t, sel, "#n3", "#n5", "#nf3", "#nf5")
+}
+
+func TestNextFilteredUntilInvalid(t *testing.T) {
+ sel := Doc2().Find(".two").NextFilteredUntil("", "")
+ assertLength(t, sel.Nodes, 0)
+}
+
+func TestNextFilteredUntilRollback(t *testing.T) {
+ sel := Doc2().Find(".two")
+ sel2 := sel.NextFilteredUntil(".even", ".six").End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestNextFilteredUntilSelection(t *testing.T) {
+ sel := Doc2().Find(".even")
+ sel2 := Doc2().Find(".five")
+ sel = sel.NextFilteredUntilSelection(".even", sel2)
+ assertLength(t, sel.Nodes, 2)
+ assertSelectionIs(t, sel, "#n3", "#nf3")
+}
+
+func TestNextFilteredUntilSelectionRollback(t *testing.T) {
+ sel := Doc2().Find(".even")
+ sel2 := Doc2().Find(".five")
+ sel3 := sel.NextFilteredUntilSelection(".even", sel2).End()
+ assertEqual(t, sel, sel3)
+}
+
+func TestNextFilteredUntilNodes(t *testing.T) {
+ sel := Doc2().Find(".even")
+ sel2 := Doc2().Find(".four")
+ sel = sel.NextFilteredUntilNodes(".odd", sel2.Nodes...)
+ assertLength(t, sel.Nodes, 4)
+ assertSelectionIs(t, sel, "#n2", "#n6", "#nf2", "#nf6")
+}
+
+func TestNextFilteredUntilNodesRollback(t *testing.T) {
+ sel := Doc2().Find(".even")
+ sel2 := Doc2().Find(".four")
+ sel3 := sel.NextFilteredUntilNodes(".odd", sel2.Nodes...).End()
+ assertEqual(t, sel, sel3)
+}
+
+func TestPrevFilteredUntil(t *testing.T) {
+ sel := Doc2().Find(".five").PrevFilteredUntil(".odd", ".one")
+ assertLength(t, sel.Nodes, 4)
+ assertSelectionIs(t, sel, "#n4", "#n2", "#nf4", "#nf2")
+}
+
+func TestPrevFilteredUntilInvalid(t *testing.T) {
+ sel := Doc2().Find(".five").PrevFilteredUntil("", "")
+ assertLength(t, sel.Nodes, 0)
+}
+
+func TestPrevFilteredUntilRollback(t *testing.T) {
+ sel := Doc2().Find(".four")
+ sel2 := sel.PrevFilteredUntil(".odd", ".one").End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestPrevFilteredUntilSelection(t *testing.T) {
+ sel := Doc2().Find(".odd")
+ sel2 := Doc2().Find(".two")
+ sel = sel.PrevFilteredUntilSelection(".odd", sel2)
+ assertLength(t, sel.Nodes, 2)
+ assertSelectionIs(t, sel, "#n4", "#nf4")
+}
+
+func TestPrevFilteredUntilSelectionRollback(t *testing.T) {
+ sel := Doc2().Find(".even")
+ sel2 := Doc2().Find(".five")
+ sel3 := sel.PrevFilteredUntilSelection(".even", sel2).End()
+ assertEqual(t, sel, sel3)
+}
+
+func TestPrevFilteredUntilNodes(t *testing.T) {
+ sel := Doc2().Find(".even")
+ sel2 := Doc2().Find(".four")
+ sel = sel.PrevFilteredUntilNodes(".odd", sel2.Nodes...)
+ assertLength(t, sel.Nodes, 2)
+ assertSelectionIs(t, sel, "#n2", "#nf2")
+}
+
+func TestPrevFilteredUntilNodesRollback(t *testing.T) {
+ sel := Doc2().Find(".even")
+ sel2 := Doc2().Find(".four")
+ sel3 := sel.PrevFilteredUntilNodes(".odd", sel2.Nodes...).End()
+ assertEqual(t, sel, sel3)
+}
+
+func TestClosestItself(t *testing.T) {
+ sel := Doc2().Find(".three")
+ sel2 := sel.Closest(".row")
+ assertLength(t, sel2.Nodes, sel.Length())
+ assertSelectionIs(t, sel2, "#n3", "#nf3")
+}
+
+func TestClosestNoDupes(t *testing.T) {
+ sel := Doc().Find(".span12")
+ sel2 := sel.Closest(".pvk-content")
+ assertLength(t, sel2.Nodes, 1)
+ assertClass(t, sel2, "pvk-content")
+}
+
+func TestClosestNone(t *testing.T) {
+ sel := Doc().Find("h4")
+ sel2 := sel.Closest("a")
+ assertLength(t, sel2.Nodes, 0)
+}
+
+func TestClosestInvalid(t *testing.T) {
+ sel := Doc().Find("h4")
+ sel2 := sel.Closest("")
+ assertLength(t, sel2.Nodes, 0)
+}
+
+func TestClosestMany(t *testing.T) {
+ sel := Doc().Find(".container-fluid")
+ sel2 := sel.Closest(".pvk-content")
+ assertLength(t, sel2.Nodes, 2)
+ assertSelectionIs(t, sel2, "#pc1", "#pc2")
+}
+
+func TestClosestRollback(t *testing.T) {
+ sel := Doc().Find(".container-fluid")
+ sel2 := sel.Closest(".pvk-content").End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestClosestSelectionItself(t *testing.T) {
+ sel := Doc2().Find(".three")
+ sel2 := sel.ClosestSelection(Doc2().Find(".row"))
+ assertLength(t, sel2.Nodes, sel.Length())
+}
+
+func TestClosestSelectionNoDupes(t *testing.T) {
+ sel := Doc().Find(".span12")
+ sel2 := sel.ClosestSelection(Doc().Find(".pvk-content"))
+ assertLength(t, sel2.Nodes, 1)
+ assertClass(t, sel2, "pvk-content")
+}
+
+func TestClosestSelectionNone(t *testing.T) {
+ sel := Doc().Find("h4")
+ sel2 := sel.ClosestSelection(Doc().Find("a"))
+ assertLength(t, sel2.Nodes, 0)
+}
+
+func TestClosestSelectionMany(t *testing.T) {
+ sel := Doc().Find(".container-fluid")
+ sel2 := sel.ClosestSelection(Doc().Find(".pvk-content"))
+ assertLength(t, sel2.Nodes, 2)
+ assertSelectionIs(t, sel2, "#pc1", "#pc2")
+}
+
+func TestClosestSelectionRollback(t *testing.T) {
+ sel := Doc().Find(".container-fluid")
+ sel2 := sel.ClosestSelection(Doc().Find(".pvk-content")).End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestClosestNodesItself(t *testing.T) {
+ sel := Doc2().Find(".three")
+ sel2 := sel.ClosestNodes(Doc2().Find(".row").Nodes...)
+ assertLength(t, sel2.Nodes, sel.Length())
+}
+
+func TestClosestNodesNoDupes(t *testing.T) {
+ sel := Doc().Find(".span12")
+ sel2 := sel.ClosestNodes(Doc().Find(".pvk-content").Nodes...)
+ assertLength(t, sel2.Nodes, 1)
+ assertClass(t, sel2, "pvk-content")
+}
+
+func TestClosestNodesNone(t *testing.T) {
+ sel := Doc().Find("h4")
+ sel2 := sel.ClosestNodes(Doc().Find("a").Nodes...)
+ assertLength(t, sel2.Nodes, 0)
+}
+
+func TestClosestNodesMany(t *testing.T) {
+ sel := Doc().Find(".container-fluid")
+ sel2 := sel.ClosestNodes(Doc().Find(".pvk-content").Nodes...)
+ assertLength(t, sel2.Nodes, 2)
+ assertSelectionIs(t, sel2, "#pc1", "#pc2")
+}
+
+func TestClosestNodesRollback(t *testing.T) {
+ sel := Doc().Find(".container-fluid")
+ sel2 := sel.ClosestNodes(Doc().Find(".pvk-content").Nodes...).End()
+ assertEqual(t, sel, sel2)
+}
+
+func TestIssue26(t *testing.T) {
+ img1 := ``
+ img2 := ``
+ cases := []struct {
+ s string
+ l int
+ }{
+ {s: img1 + img2, l: 2},
+ {s: img1, l: 1},
+ {s: img2, l: 1},
+ }
+ for _, c := range cases {
+ doc, err := NewDocumentFromReader(strings.NewReader(c.s))
+ if err != nil {
+ t.Fatal(err)
+ }
+ sel := doc.Find("img[src]")
+ assertLength(t, sel.Nodes, c.l)
+ }
+}
diff --git a/vendor/github.com/PuerkitoBio/goquery/type.go b/vendor/github.com/PuerkitoBio/goquery/type.go
new file mode 100644
index 0000000..6ad51db
--- /dev/null
+++ b/vendor/github.com/PuerkitoBio/goquery/type.go
@@ -0,0 +1,141 @@
+package goquery
+
+import (
+ "errors"
+ "io"
+ "net/http"
+ "net/url"
+
+ "github.com/andybalholm/cascadia"
+
+ "golang.org/x/net/html"
+)
+
// Document represents an HTML document to be manipulated. Unlike jQuery, which
// is loaded as part of a DOM document, and thus acts upon its containing
// document, GoQuery doesn't know which HTML document to act upon. So it needs
// to be told, and that's what the Document class is for. It holds the root
// document node to manipulate, and can make selections on this document.
type Document struct {
	*Selection
	Url      *url.URL   // source URL when built from a response/URL constructor; nil otherwise
	rootNode *html.Node // root node of the parsed HTML tree
}
+
// NewDocumentFromNode is a Document constructor that takes a root html Node
// as argument. No URL is associated with the resulting document.
func NewDocumentFromNode(root *html.Node) *Document {
	return newDocument(root, nil)
}
+
+// NewDocument is a Document constructor that takes a string URL as argument.
+// It loads the specified document, parses it, and stores the root Document
+// node, ready to be manipulated.
+//
+// Deprecated: Use the net/http standard library package to make the request
+// and validate the response before calling goquery.NewDocumentFromReader
+// with the response's body.
+func NewDocument(url string) (*Document, error) {
+ // Load the URL
+ res, e := http.Get(url)
+ if e != nil {
+ return nil, e
+ }
+ return NewDocumentFromResponse(res)
+}
+
+// NewDocumentFromReader returns a Document from an io.Reader.
+// It returns an error as second value if the reader's data cannot be parsed
+// as html. It does not check if the reader is also an io.Closer, the
+// provided reader is never closed by this call. It is the responsibility
+// of the caller to close it if required.
+func NewDocumentFromReader(r io.Reader) (*Document, error) {
+ root, e := html.Parse(r)
+ if e != nil {
+ return nil, e
+ }
+ return newDocument(root, nil), nil
+}
+
+// NewDocumentFromResponse is another Document constructor that takes an http response as argument.
+// It loads the specified response's document, parses it, and stores the root Document
+// node, ready to be manipulated. The response's body is closed on return.
+//
+// Deprecated: Use goquery.NewDocumentFromReader with the response's body.
+func NewDocumentFromResponse(res *http.Response) (*Document, error) {
+ if res == nil {
+ return nil, errors.New("Response is nil")
+ }
+ defer res.Body.Close()
+ if res.Request == nil {
+ return nil, errors.New("Response.Request is nil")
+ }
+
+ // Parse the HTML into nodes
+ root, e := html.Parse(res.Body)
+ if e != nil {
+ return nil, e
+ }
+
+ // Create and fill the document
+ return newDocument(root, res.Request.URL), nil
+}
+
// CloneDocument creates a deep-clone of a document.
func CloneDocument(doc *Document) *Document {
	// Clone the whole node tree so mutations on the copy never touch
	// the original document's nodes.
	return newDocument(cloneNode(doc.rootNode), doc.Url)
}
+
+// Private constructor, make sure all fields are correctly filled.
+func newDocument(root *html.Node, url *url.URL) *Document {
+ // Create and fill the document
+ d := &Document{nil, url, root}
+ d.Selection = newSingleSelection(root, d)
+ return d
+}
+
// Selection represents a collection of nodes matching some criteria. The
// initial Selection can be created by using Document.Find, and then
// manipulated using the jQuery-like chainable syntax and methods.
type Selection struct {
	Nodes    []*html.Node // the matched nodes
	document *Document    // the document this selection belongs to
	prevSel  *Selection   // previous selection in the chain (rollback target; see pushStack)
}
+
+// Helper constructor to create an empty selection
+func newEmptySelection(doc *Document) *Selection {
+ return &Selection{nil, doc, nil}
+}
+
+// Helper constructor to create a selection of only one node
+func newSingleSelection(node *html.Node, doc *Document) *Selection {
+ return &Selection{[]*html.Node{node}, doc, nil}
+}
+
// Matcher is an interface that defines the methods to match
// HTML nodes against a compiled selector string. Cascadia's
// Selector implements this interface.
type Matcher interface {
	// Match reports whether the node matches the selector.
	Match(*html.Node) bool
	// MatchAll returns the nodes matching the selector, searching from
	// the given node.
	MatchAll(*html.Node) []*html.Node
	// Filter returns the subset of the given nodes that match the selector.
	Filter([]*html.Node) []*html.Node
}
+
+// compileMatcher compiles the selector string s and returns
+// the corresponding Matcher. If s is an invalid selector string,
+// it returns a Matcher that fails all matches.
+func compileMatcher(s string) Matcher {
+ cs, err := cascadia.Compile(s)
+ if err != nil {
+ return invalidMatcher{}
+ }
+ return cs
+}
+
+// invalidMatcher is a Matcher that always fails to match.
+type invalidMatcher struct{}
+
+func (invalidMatcher) Match(n *html.Node) bool { return false }
+func (invalidMatcher) MatchAll(n *html.Node) []*html.Node { return nil }
+func (invalidMatcher) Filter(ns []*html.Node) []*html.Node { return nil }
diff --git a/vendor/github.com/PuerkitoBio/goquery/type_test.go b/vendor/github.com/PuerkitoBio/goquery/type_test.go
new file mode 100644
index 0000000..1e82d5e
--- /dev/null
+++ b/vendor/github.com/PuerkitoBio/goquery/type_test.go
@@ -0,0 +1,202 @@
+package goquery
+
+import (
+ "bytes"
+ "fmt"
+ "os"
+ "strings"
+ "testing"
+
+ "golang.org/x/net/html"
+)
+
+// Test helper functions and members
+var doc *Document
+var doc2 *Document
+var doc3 *Document
+var docB *Document
+var docW *Document
+
+func Doc() *Document {
+ if doc == nil {
+ doc = loadDoc("page.html")
+ }
+ return doc
+}
+
+func Doc2() *Document {
+ if doc2 == nil {
+ doc2 = loadDoc("page2.html")
+ }
+ return doc2
+}
+
+func Doc2Clone() *Document {
+ return CloneDocument(Doc2())
+}
+
+func Doc3() *Document {
+ if doc3 == nil {
+ doc3 = loadDoc("page3.html")
+ }
+ return doc3
+}
+
+func Doc3Clone() *Document {
+ return CloneDocument(Doc3())
+}
+
+func DocB() *Document {
+ if docB == nil {
+ docB = loadDoc("gotesting.html")
+ }
+ return docB
+}
+
+func DocW() *Document {
+ if docW == nil {
+ docW = loadDoc("gowiki.html")
+ }
+ return docW
+}
+
+func assertLength(t *testing.T, nodes []*html.Node, length int) {
+ if len(nodes) != length {
+ t.Errorf("Expected %d nodes, found %d.", length, len(nodes))
+ for i, n := range nodes {
+ t.Logf("Node %d: %+v.", i, n)
+ }
+ }
+}
+
+func assertClass(t *testing.T, sel *Selection, class string) {
+ if !sel.HasClass(class) {
+ t.Errorf("Expected node to have class %s, found %+v.", class, sel.Get(0))
+ }
+}
+
+func assertPanic(t *testing.T) {
+ if e := recover(); e == nil {
+ t.Error("Expected a panic.")
+ }
+}
+
+func assertEqual(t *testing.T, s1 *Selection, s2 *Selection) {
+ if s1 != s2 {
+ t.Error("Expected selection objects to be the same.")
+ }
+}
+
+func assertSelectionIs(t *testing.T, sel *Selection, is ...string) {
+ for i := 0; i < sel.Length(); i++ {
+ if !sel.Eq(i).Is(is[i]) {
+ t.Errorf("Expected node %d to be %s, found %+v", i, is[i], sel.Get(i))
+ }
+ }
+}
+
+func printSel(t *testing.T, sel *Selection) {
+ if testing.Verbose() {
+ h, err := sel.Html()
+ if err != nil {
+ t.Fatal(err)
+ }
+ t.Log(h)
+ }
+}
+
+func loadDoc(page string) *Document {
+ var f *os.File
+ var e error
+
+ if f, e = os.Open(fmt.Sprintf("./testdata/%s", page)); e != nil {
+ panic(e.Error())
+ }
+ defer f.Close()
+
+ var node *html.Node
+ if node, e = html.Parse(f); e != nil {
+ panic(e.Error())
+ }
+ return NewDocumentFromNode(node)
+}
+
+func TestNewDocument(t *testing.T) {
+ if f, e := os.Open("./testdata/page.html"); e != nil {
+ t.Error(e.Error())
+ } else {
+ defer f.Close()
+ if node, e := html.Parse(f); e != nil {
+ t.Error(e.Error())
+ } else {
+ doc = NewDocumentFromNode(node)
+ }
+ }
+}
+
+func TestNewDocumentFromReader(t *testing.T) {
+ cases := []struct {
+ src string
+ err bool
+ sel string
+ cnt int
+ }{
+ 0: {
+ src: `
+
+
+Test
+
+
Hi
+
+`,
+ sel: "h1",
+ cnt: 1,
+ },
+ 1: {
+ // Actually pretty hard to make html.Parse return an error
+ // based on content...
+ src: `>>qq>`,
+ },
+ }
+ buf := bytes.NewBuffer(nil)
+
+ for i, c := range cases {
+ buf.Reset()
+ buf.WriteString(c.src)
+
+ d, e := NewDocumentFromReader(buf)
+ if (e != nil) != c.err {
+ if c.err {
+ t.Errorf("[%d] - expected error, got none", i)
+ } else {
+ t.Errorf("[%d] - expected no error, got %s", i, e)
+ }
+ }
+ if c.sel != "" {
+ s := d.Find(c.sel)
+ if s.Length() != c.cnt {
+ t.Errorf("[%d] - expected %d nodes, found %d", i, c.cnt, s.Length())
+ }
+ }
+ }
+}
+
+func TestNewDocumentFromResponseNil(t *testing.T) {
+ _, e := NewDocumentFromResponse(nil)
+ if e == nil {
+ t.Error("Expected error, got none")
+ }
+}
+
+func TestIssue103(t *testing.T) {
+ d, err := NewDocumentFromReader(strings.NewReader("Scientists Stored These Images in DNA—Then Flawlessly Retrieved Them"))
+ if err != nil {
+ t.Error(err)
+ }
+ text := d.Find("title").Text()
+ for i, r := range text {
+ t.Logf("%d: %d - %q\n", i, r, string(r))
+ }
+ t.Log(text)
+}
diff --git a/vendor/github.com/PuerkitoBio/goquery/utilities.go b/vendor/github.com/PuerkitoBio/goquery/utilities.go
new file mode 100644
index 0000000..b4c061a
--- /dev/null
+++ b/vendor/github.com/PuerkitoBio/goquery/utilities.go
@@ -0,0 +1,161 @@
+package goquery
+
+import (
+ "bytes"
+
+ "golang.org/x/net/html"
+)
+
// minNodesForSet is used to determine if a set (map[*html.Node]bool) should
// be used instead of iterating over a slice. The set uses more memory and
// is slower than slice iteration for small N.
const minNodesForSet = 1000

// nodeNames maps non-element node types to their DOM-style node names,
// indexed by html.NodeType.
var nodeNames = []string{
	html.ErrorNode:    "#error",
	html.TextNode:     "#text",
	html.DocumentNode: "#document",
	html.CommentNode:  "#comment",
}
+
+// NodeName returns the node name of the first element in the selection.
+// It tries to behave in a similar way as the DOM's nodeName property
+// (https://developer.mozilla.org/en-US/docs/Web/API/Node/nodeName).
+//
+// Go's net/html package defines the following node types, listed with
+// the corresponding returned value from this function:
+//
+// ErrorNode : #error
+// TextNode : #text
+// DocumentNode : #document
+// ElementNode : the element's tag name
+// CommentNode : #comment
+// DoctypeNode : the name of the document type
+//
+func NodeName(s *Selection) string {
+ if s.Length() == 0 {
+ return ""
+ }
+ switch n := s.Get(0); n.Type {
+ case html.ElementNode, html.DoctypeNode:
+ return n.Data
+ default:
+ if n.Type >= 0 && int(n.Type) < len(nodeNames) {
+ return nodeNames[n.Type]
+ }
+ return ""
+ }
+}
+
+// OuterHtml returns the outer HTML rendering of the first item in
+// the selection - that is, the HTML including the first element's
+// tag and attributes.
+//
+// Unlike InnerHtml, this is a function and not a method on the Selection,
+// because this is not a jQuery method (in javascript-land, this is
+// a property provided by the DOM).
+func OuterHtml(s *Selection) (string, error) {
+ var buf bytes.Buffer
+
+ if s.Length() == 0 {
+ return "", nil
+ }
+ n := s.Get(0)
+ if err := html.Render(&buf, n); err != nil {
+ return "", err
+ }
+ return buf.String(), nil
+}
+
+// Loop through all container nodes to search for the target node.
+func sliceContains(container []*html.Node, contained *html.Node) bool {
+ for _, n := range container {
+ if nodeContains(n, contained) {
+ return true
+ }
+ }
+
+ return false
+}
+
+// Checks if the contained node is within the container node.
+func nodeContains(container *html.Node, contained *html.Node) bool {
+ // Check if the parent of the contained node is the container node, traversing
+ // upward until the top is reached, or the container is found.
+ for contained = contained.Parent; contained != nil; contained = contained.Parent {
+ if container == contained {
+ return true
+ }
+ }
+ return false
+}
+
+// Checks if the target node is in the slice of nodes.
+func isInSlice(slice []*html.Node, node *html.Node) bool {
+ return indexInSlice(slice, node) > -1
+}
+
+// Returns the index of the target node in the slice, or -1.
+func indexInSlice(slice []*html.Node, node *html.Node) int {
+ if node != nil {
+ for i, n := range slice {
+ if n == node {
+ return i
+ }
+ }
+ }
+ return -1
+}
+
// appendWithoutDuplicates appends the new nodes to the target slice, making
// sure no duplicate is added. There is no check to the original state of the
// target slice, so it may still contain duplicates. The target slice is
// returned because append() may create a new underlying array. If targetSet
// is nil, a local set is created with the target if len(target) + len(nodes)
// is greater than minNodesForSet. If a non-nil targetSet is passed, it is
// assumed to accurately reflect the current contents of target, and it is
// updated in place as nodes are appended.
func appendWithoutDuplicates(target []*html.Node, nodes []*html.Node, targetSet map[*html.Node]bool) []*html.Node {
	// if there are not that many nodes, don't use the map, faster to just use nested loops
	// (unless a non-nil targetSet is passed, in which case the caller knows better).
	if targetSet == nil && len(target)+len(nodes) < minNodesForSet {
		for _, n := range nodes {
			if !isInSlice(target, n) {
				target = append(target, n)
			}
		}
		return target
	}

	// if a targetSet is passed, then assume it is reliable, otherwise create one
	// and initialize it with the current target contents.
	if targetSet == nil {
		targetSet = make(map[*html.Node]bool, len(target))
		for _, n := range target {
			targetSet[n] = true
		}
	}
	for _, n := range nodes {
		if !targetSet[n] {
			target = append(target, n)
			targetSet[n] = true
		}
	}

	return target
}
+
// grep loops through a selection, returning only those nodes that pass the
// predicate function. The predicate receives the node's index within the
// selection and a single-node Selection wrapping the node.
func grep(sel *Selection, predicate func(i int, s *Selection) bool) (result []*html.Node) {
	for i, n := range sel.Nodes {
		if predicate(i, newSingleSelection(n, sel.document)) {
			result = append(result, n)
		}
	}
	return result
}
+
+// Creates a new Selection object based on the specified nodes, and keeps the
+// source Selection object on the stack (linked list).
+func pushStack(fromSel *Selection, nodes []*html.Node) *Selection {
+ result := &Selection{nodes, fromSel.document, fromSel}
+ return result
+}
diff --git a/vendor/github.com/PuerkitoBio/goquery/utilities_test.go b/vendor/github.com/PuerkitoBio/goquery/utilities_test.go
new file mode 100644
index 0000000..c8e9d54
--- /dev/null
+++ b/vendor/github.com/PuerkitoBio/goquery/utilities_test.go
@@ -0,0 +1,128 @@
+package goquery
+
+import (
+ "reflect"
+ "sort"
+ "strings"
+ "testing"
+
+ "golang.org/x/net/html"
+)
+
+var allNodes = `
+
+
+
+
+
+
`},
+ {nComment, nil, ""},
+ {nText, nil, `
+ This is some text.
+ `},
+ {nil, sHeaders, ``},
+ }
+ for i, c := range cases {
+ if c.sel == nil {
+ c.sel = newSingleSelection(c.node, doc)
+ }
+ got, err := OuterHtml(c.sel)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if got != c.want {
+ t.Errorf("%d: want %q, got %q", i, c.want, got)
+ }
+ }
+}
diff --git a/vendor/github.com/andybalholm/cascadia/LICENSE b/vendor/github.com/andybalholm/cascadia/LICENSE
new file mode 100755
index 0000000..ee5ad35
--- /dev/null
+++ b/vendor/github.com/andybalholm/cascadia/LICENSE
@@ -0,0 +1,24 @@
+Copyright (c) 2011 Andy Balholm. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/andybalholm/cascadia/README.md b/vendor/github.com/andybalholm/cascadia/README.md
new file mode 100644
index 0000000..26f4c37
--- /dev/null
+++ b/vendor/github.com/andybalholm/cascadia/README.md
@@ -0,0 +1,9 @@
+# cascadia
+
+[![](https://travis-ci.org/andybalholm/cascadia.svg)](https://travis-ci.org/andybalholm/cascadia)
+
+The Cascadia package implements CSS selectors for use with the parse trees produced by the html package.
+
+To test CSS selectors without writing Go code, check out [cascadia](https://github.com/suntong/cascadia) the command line tool, a thin wrapper around this package.
+
+[Refer to godoc here](https://godoc.org/github.com/andybalholm/cascadia).
diff --git a/vendor/github.com/andybalholm/cascadia/benchmark_test.go b/vendor/github.com/andybalholm/cascadia/benchmark_test.go
new file mode 100644
index 0000000..42bf500
--- /dev/null
+++ b/vendor/github.com/andybalholm/cascadia/benchmark_test.go
@@ -0,0 +1,53 @@
+package cascadia
+
+import (
+ "strings"
+ "testing"
+
+ "golang.org/x/net/html"
+)
+
+func MustParseHTML(doc string) *html.Node {
+ dom, err := html.Parse(strings.NewReader(doc))
+ if err != nil {
+ panic(err)
+ }
+ return dom
+}
+
+var selector = MustCompile(`div.matched`)
+var doc = `
+
+
+
`,
+ `[href#=(^https:\/\/[^\/]*\/?news)]`,
+ []string{
+ ``,
+ },
+ },
+ {
+ ``,
+ `:input`,
+ []string{
+ ``,
+ ``,
+ ``,
+ ``,
+ ``,
+ },
+ },
+ {
+ ``,
+ ":root",
+ []string{
+ "",
+ },
+ },
+ {
+ ``,
+ "*:root",
+ []string{
+ "",
+ },
+ },
+ {
+ ``,
+ "*:root:first-child",
+ []string{},
+ },
+ {
+ ``,
+ "*:root:nth-child(1)",
+ []string{},
+ },
+ {
+ ``,
+ "a:not(:root)",
+ []string{
+ ``,
+ },
+ },
+}
+
+func TestSelectors(t *testing.T) {
+ for _, test := range selectorTests {
+ s, err := Compile(test.selector)
+ if err != nil {
+ t.Errorf("error compiling %q: %s", test.selector, err)
+ continue
+ }
+
+ doc, err := html.Parse(strings.NewReader(test.HTML))
+ if err != nil {
+ t.Errorf("error parsing %q: %s", test.HTML, err)
+ continue
+ }
+
+ matches := s.MatchAll(doc)
+ if len(matches) != len(test.results) {
+ t.Errorf("selector %s wanted %d elements, got %d instead", test.selector, len(test.results), len(matches))
+ continue
+ }
+
+ for i, m := range matches {
+ got := nodeString(m)
+ if got != test.results[i] {
+ t.Errorf("selector %s wanted %s, got %s instead", test.selector, test.results[i], got)
+ }
+ }
+
+ firstMatch := s.MatchFirst(doc)
+ if len(test.results) == 0 {
+ if firstMatch != nil {
+ t.Errorf("MatchFirst: selector %s want nil, got %s", test.selector, nodeString(firstMatch))
+ }
+ } else {
+ got := nodeString(firstMatch)
+ if got != test.results[0] {
+ t.Errorf("MatchFirst: selector %s want %s, got %s", test.selector, test.results[0], got)
+ }
+ }
+ }
+}
diff --git a/vendor/github.com/antchfx/htmlquery/LICENSE b/vendor/github.com/antchfx/htmlquery/LICENSE
new file mode 100644
index 0000000..e14c371
--- /dev/null
+++ b/vendor/github.com/antchfx/htmlquery/LICENSE
@@ -0,0 +1,17 @@
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
\ No newline at end of file
diff --git a/vendor/github.com/antchfx/htmlquery/README.md b/vendor/github.com/antchfx/htmlquery/README.md
new file mode 100644
index 0000000..3d16f55
--- /dev/null
+++ b/vendor/github.com/antchfx/htmlquery/README.md
@@ -0,0 +1,93 @@
+htmlquery
+====
+[![Build Status](https://travis-ci.org/antchfx/htmlquery.svg?branch=master)](https://travis-ci.org/antchfx/htmlquery)
+[![Coverage Status](https://coveralls.io/repos/github/antchfx/htmlquery/badge.svg?branch=master)](https://coveralls.io/github/antchfx/htmlquery?branch=master)
+[![GoDoc](https://godoc.org/github.com/antchfx/htmlquery?status.svg)](https://godoc.org/github.com/antchfx/htmlquery)
+[![Go Report Card](https://goreportcard.com/badge/github.com/antchfx/htmlquery)](https://goreportcard.com/report/github.com/antchfx/htmlquery)
+
+Overview
+====
+
+htmlquery is an XPath query package for HTML; it lets you extract data from, or evaluate XPath expressions against, HTML documents.
+
+Installation
+====
+
+> $ go get github.com/antchfx/htmlquery
+
+Getting Started
+====
+
+#### Load HTML document from URL.
+
+```go
+doc, err := htmlquery.LoadURL("http://example.com/")
+```
+
+#### Load HTML document from string.
+
+```go
+s := `....`
+doc, err := htmlquery.Parse(strings.NewReader(s))
+```
+
+#### Find all A elements.
+
+```go
+list := htmlquery.Find(doc, "//a")
+```
+
+#### Find all A elements that have `href` attribute.
+
+```go
+list := htmlquery.Find(doc, "//a[@href]")
+```
+
+#### Find all A elements and only get `href` attribute self.
+
+```go
+list := htmlquery.Find(doc, "//a/@href")
+```
+
+#### Find the third A element.
+
+```go
+a := htmlquery.FindOne(doc, "//a[3]")
+```
+
+#### Count all IMG elements.
+
+```go
+expr, _ := xpath.Compile("count(//img)")
+v := expr.Evaluate(htmlquery.CreateXPathNavigator(doc)).(float64)
+fmt.Printf("total count is %f", v)
+```
+
+Quick Tutorial
+===
+
+```go
+func main() {
+ doc, err := htmlquery.LoadURL("https://www.bing.com/search?q=golang")
+ if err != nil {
+ panic(err)
+ }
+ // Find all news items.
+ for i, n := range htmlquery.Find(doc, "//ol/li") {
+ a := htmlquery.FindOne(n, "//a")
+ fmt.Printf("%d %s(%s)\n", i, htmlquery.InnerText(a), htmlquery.SelectAttr(a, "href"))
+ }
+}
+```
+
+List of supported XPath query packages
+===
+|Name |Description |
+|--------------------------|----------------|
+|[htmlquery](https://github.com/antchfx/htmlquery) | XPath query package for the HTML document|
+|[xmlquery](https://github.com/antchfx/xmlquery) | XPath query package for the XML document|
+|[jsonquery](https://github.com/antchfx/jsonquery) | XPath query package for the JSON document|
+
+Questions
+===
+Please let me know if you have any questions.
diff --git a/vendor/github.com/antchfx/htmlquery/query.go b/vendor/github.com/antchfx/htmlquery/query.go
new file mode 100644
index 0000000..3b93256
--- /dev/null
+++ b/vendor/github.com/antchfx/htmlquery/query.go
@@ -0,0 +1,299 @@
+/*
+Package htmlquery provides functions to extract data from HTML documents using XPath expressions.
+*/
+package htmlquery
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "net/http"
+
+ "github.com/antchfx/xpath"
+ "golang.org/x/net/html"
+ "golang.org/x/net/html/charset"
+)
+
+var _ xpath.NodeNavigator = &NodeNavigator{}
+
+// CreateXPathNavigator creates a new xpath.NodeNavigator for the specified html.Node.
+func CreateXPathNavigator(top *html.Node) *NodeNavigator {
+ return &NodeNavigator{curr: top, root: top, attr: -1}
+}
+
+// Find searches the html.Node that matches by the specified XPath expr.
+func Find(top *html.Node, expr string) []*html.Node {
+ var elems []*html.Node
+ exp, err := xpath.Compile(expr)
+ if err != nil {
+ panic(err)
+ }
+ t := exp.Select(CreateXPathNavigator(top))
+ for t.MoveNext() {
+ elems = append(elems, getCurrentNode(t))
+ }
+ return elems
+}
+
+// FindOne searches the html.Node that matches by the specified XPath expr,
+// and returns first element of matched html.Node.
+func FindOne(top *html.Node, expr string) *html.Node {
+ var elem *html.Node
+ exp, err := xpath.Compile(expr)
+ if err != nil {
+ panic(err)
+ }
+ t := exp.Select(CreateXPathNavigator(top))
+ if t.MoveNext() {
+ elem = getCurrentNode(t)
+ }
+ return elem
+}
+
+// FindEach searches the html.Node and calls functions cb.
+func FindEach(top *html.Node, expr string, cb func(int, *html.Node)) {
+ exp, err := xpath.Compile(expr)
+ if err != nil {
+ panic(err)
+ }
+ t := exp.Select(CreateXPathNavigator(top))
+ i := 0
+ for t.MoveNext() {
+ cb(i, getCurrentNode(t))
+ i++
+ }
+}
+
+// LoadURL loads the HTML document from the specified URL.
+func LoadURL(url string) (*html.Node, error) {
+ resp, err := http.Get(url)
+ if err != nil {
+ return nil, err
+ }
+ defer resp.Body.Close()
+
+ r, err := charset.NewReader(resp.Body, resp.Header.Get("Content-Type"))
+ if err != nil {
+ return nil, err
+ }
+ return html.Parse(r)
+}
+
+func getCurrentNode(it *xpath.NodeIterator) *html.Node {
+ n := it.Current().(*NodeNavigator)
+ if n.NodeType() == xpath.AttributeNode {
+ childNode := &html.Node{
+ Type: html.TextNode,
+ Data: n.Value(),
+ }
+ return &html.Node{
+ Type: html.ElementNode,
+ Data: n.LocalName(),
+ FirstChild: childNode,
+ LastChild: childNode,
+ }
+
+ }
+ return n.curr
+}
+
+// Parse returns the parse tree for the HTML from the given Reader.
+func Parse(r io.Reader) (*html.Node, error) {
+ return html.Parse(r)
+}
+
+// InnerText returns the text between the start and end tags of the object.
+func InnerText(n *html.Node) string {
+ var output func(*bytes.Buffer, *html.Node)
+ output = func(buf *bytes.Buffer, n *html.Node) {
+ switch n.Type {
+ case html.TextNode:
+ buf.WriteString(n.Data)
+ return
+ case html.CommentNode:
+ return
+ }
+ for child := n.FirstChild; child != nil; child = child.NextSibling {
+ output(buf, child)
+ }
+ }
+
+ var buf bytes.Buffer
+ output(&buf, n)
+ return buf.String()
+}
+
+// SelectAttr returns the attribute value with the specified name.
+func SelectAttr(n *html.Node, name string) (val string) {
+ if n == nil {
+ return
+ }
+ if n.Type == html.ElementNode && n.Parent == nil && name == n.Data {
+ return InnerText(n)
+ }
+ for _, attr := range n.Attr {
+ if attr.Key == name {
+ val = attr.Val
+ break
+ }
+ }
+ return
+}
+
+// OutputHTML returns the text including tags name.
+func OutputHTML(n *html.Node, self bool) string {
+ var buf bytes.Buffer
+ if self {
+ html.Render(&buf, n)
+ } else {
+ for n := n.FirstChild; n != nil; n = n.NextSibling {
+ html.Render(&buf, n)
+ }
+ }
+ return buf.String()
+}
+
+type NodeNavigator struct {
+ root, curr *html.Node
+ attr int
+}
+
+func (h *NodeNavigator) Current() *html.Node {
+ return h.curr
+}
+
+func (h *NodeNavigator) NodeType() xpath.NodeType {
+ switch h.curr.Type {
+ case html.CommentNode:
+ return xpath.CommentNode
+ case html.TextNode:
+ return xpath.TextNode
+ case html.DocumentNode:
+ return xpath.RootNode
+ case html.ElementNode:
+ if h.attr != -1 {
+ return xpath.AttributeNode
+ }
+ return xpath.ElementNode
+ case html.DoctypeNode:
+ // ignored declare and as Root-Node type.
+ return xpath.RootNode
+ }
+ panic(fmt.Sprintf("unknown HTML node type: %v", h.curr.Type))
+}
+
+func (h *NodeNavigator) LocalName() string {
+ if h.attr != -1 {
+ return h.curr.Attr[h.attr].Key
+ }
+ return h.curr.Data
+}
+
+func (*NodeNavigator) Prefix() string {
+ return ""
+}
+
+func (h *NodeNavigator) Value() string {
+ switch h.curr.Type {
+ case html.CommentNode:
+ return h.curr.Data
+ case html.ElementNode:
+ if h.attr != -1 {
+ return h.curr.Attr[h.attr].Val
+ }
+ return InnerText(h.curr)
+ case html.TextNode:
+ return h.curr.Data
+ }
+ return ""
+}
+
+func (h *NodeNavigator) Copy() xpath.NodeNavigator {
+ n := *h
+ return &n
+}
+
+func (h *NodeNavigator) MoveToRoot() {
+ h.curr = h.root
+}
+
+func (h *NodeNavigator) MoveToParent() bool {
+ if h.attr != -1 {
+ h.attr = -1
+ return true
+ } else if node := h.curr.Parent; node != nil {
+ h.curr = node
+ return true
+ }
+ return false
+}
+
+func (h *NodeNavigator) MoveToNextAttribute() bool {
+ if h.attr >= len(h.curr.Attr)-1 {
+ return false
+ }
+ h.attr++
+ return true
+}
+
+func (h *NodeNavigator) MoveToChild() bool {
+ if h.attr != -1 {
+ return false
+ }
+ if node := h.curr.FirstChild; node != nil {
+ h.curr = node
+ return true
+ }
+ return false
+}
+
+func (h *NodeNavigator) MoveToFirst() bool {
+ if h.attr != -1 || h.curr.PrevSibling == nil {
+ return false
+ }
+ for {
+ node := h.curr.PrevSibling
+ if node == nil {
+ break
+ }
+ h.curr = node
+ }
+ return true
+}
+
+func (h *NodeNavigator) String() string {
+ return h.Value()
+}
+
+func (h *NodeNavigator) MoveToNext() bool {
+ if h.attr != -1 {
+ return false
+ }
+ if node := h.curr.NextSibling; node != nil {
+ h.curr = node
+ return true
+ }
+ return false
+}
+
+func (h *NodeNavigator) MoveToPrevious() bool {
+ if h.attr != -1 {
+ return false
+ }
+ if node := h.curr.PrevSibling; node != nil {
+ h.curr = node
+ return true
+ }
+ return false
+}
+
+func (h *NodeNavigator) MoveTo(other xpath.NodeNavigator) bool {
+ node, ok := other.(*NodeNavigator)
+ if !ok || node.root != h.root {
+ return false
+ }
+
+ h.curr = node.curr
+ h.attr = node.attr
+ return true
+}
diff --git a/vendor/github.com/antchfx/htmlquery/query_test.go b/vendor/github.com/antchfx/htmlquery/query_test.go
new file mode 100644
index 0000000..d3503b7
--- /dev/null
+++ b/vendor/github.com/antchfx/htmlquery/query_test.go
@@ -0,0 +1,126 @@
+package htmlquery
+
+import (
+ "strings"
+ "testing"
+
+ "github.com/antchfx/xpath"
+ "golang.org/x/net/html"
+)
+
+const htmlSample = `
+
+Hello,World!
+
+
+
+
+
+
City Gallery
+
+
+
+
London
+
+
London is the capital city of England. It is the most populous city in the United Kingdom, with a metropolitan area of over 13 million inhabitants.
+
Standing on the River Thames, London has been a major settlement for two millennia, its history going back to its founding by the Romans, who named it Londinium.
+
+
+
+
+
+`
+
+var testDoc = loadHTML(htmlSample)
+
+func TestHttpLoad(t *testing.T) {
+ doc, err := LoadURL("http://www.bing.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+ if doc == nil {
+ t.Fatal("doc is nil")
+ }
+}
+
+func TestNavigator(t *testing.T) {
+ top := FindOne(testDoc, "//html")
+ nav := &NodeNavigator{curr: top, root: top, attr: -1}
+ nav.MoveToChild() // HEAD
+ nav.MoveToNext()
+ if nav.NodeType() != xpath.TextNode {
+ t.Fatalf("expected node type is TextNode, but got %v", nav.NodeType())
+ }
+ nav.MoveToNext() //
+ if nav.Value() != InnerText(FindOne(testDoc, "//body")) {
+ t.Fatal("body not equal")
+ }
+ nav.MoveToPrevious() //
+ nav.MoveToParent() //
+ if nav.curr != top {
+ t.Fatal("current node is not html node")
+ }
+ nav.MoveToNextAttribute()
+ if nav.LocalName() != "lang" {
+ t.Fatal("node not move to lang attribute")
+ }
+
+ nav.MoveToParent()
+ nav.MoveToFirst() //
+ if nav.curr.Type != html.DoctypeNode {
+ t.Fatalf("expected node type is DoctypeNode,but got %d", nav.curr.Type)
+ }
+}
+
+func TestXPath(t *testing.T) {
+ node := FindOne(testDoc, "//html")
+ if SelectAttr(node, "lang") != "en-US" {
+ t.Fatal("//html[@lang] != en-Us")
+ }
+
+ var c int
+ FindEach(testDoc, "//li", func(i int, node *html.Node) {
+ c++
+ })
+ if c != len(Find(testDoc, "//li")) {
+ t.Fatal("li node count != 3")
+ }
+ node = FindOne(testDoc, "//header")
+ if strings.Index(InnerText(node), "Logo") > 0 {
+ t.Fatal("InnerText() have comment node text")
+ }
+ if strings.Index(OutputHTML(node, true), "Logo") == -1 {
+ t.Fatal("OutputHTML() should have comment node text")
+ }
+ link := FindOne(testDoc, "//a[1]/@href")
+ if link == nil {
+ t.Fatal("link is nil")
+ }
+ if v := InnerText(link); v != "/London" {
+ t.Fatalf("expect value is /London, but got %s", v)
+ }
+
+}
+
+func TestXPathCdUp(t *testing.T) {
+ doc := loadHTML(``)
+ node := FindOne(doc, "//b/@attr/..")
+ t.Logf("node = %#v", node)
+ if node == nil || node.Data != "b" {
+ t.Fatal("//b/@attr/.. != b")
+ }
+}
+
+func loadHTML(str string) *html.Node {
+ node, err := Parse(strings.NewReader(str))
+ if err != nil {
+ panic(err)
+ }
+ return node
+}
diff --git a/vendor/github.com/antchfx/xmlquery/LICENSE b/vendor/github.com/antchfx/xmlquery/LICENSE
new file mode 100644
index 0000000..e14c371
--- /dev/null
+++ b/vendor/github.com/antchfx/xmlquery/LICENSE
@@ -0,0 +1,17 @@
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
\ No newline at end of file
diff --git a/vendor/github.com/antchfx/xmlquery/README.md b/vendor/github.com/antchfx/xmlquery/README.md
new file mode 100644
index 0000000..12ab85f
--- /dev/null
+++ b/vendor/github.com/antchfx/xmlquery/README.md
@@ -0,0 +1,169 @@
+xmlquery
+====
+[![Build Status](https://travis-ci.org/antchfx/xmlquery.svg?branch=master)](https://travis-ci.org/antchfx/xmlquery)
+[![Coverage Status](https://coveralls.io/repos/github/antchfx/xmlquery/badge.svg?branch=master)](https://coveralls.io/github/antchfx/xmlquery?branch=master)
+[![GoDoc](https://godoc.org/github.com/antchfx/xmlquery?status.svg)](https://godoc.org/github.com/antchfx/xmlquery)
+[![Go Report Card](https://goreportcard.com/badge/github.com/antchfx/xmlquery)](https://goreportcard.com/report/github.com/antchfx/xmlquery)
+
+Overview
+===
+
+xmlquery is an XPath query package for XML document, lets you extract data or evaluate from XML documents by an XPath expression.
+
+Installation
+====
+
+> $ go get github.com/antchfx/xmlquery
+
+Getting Started
+===
+
+#### Parse an XML document from a URL.
+
+```go
+doc, err := xmlquery.LoadURL("http://www.example.com/sitemap.xml")
+```
+
+#### Parse an XML document from a string.
+
+```go
+s := ``
+doc, err := xmlquery.Parse(strings.NewReader(s))
+```
+
+#### Parse an XML document from an io.Reader.
+
+```go
+f, err := os.Open("../books.xml")
+doc, err := xmlquery.Parse(f)
+```
+
+#### Find authors of all books in the bookstore.
+
+```go
+list := xmlquery.Find(doc, "//book//author")
+// or
+list := xmlquery.Find(doc, "//author")
+```
+
+#### Find the second book.
+
+```go
+book := xmlquery.FindOne(doc, "//book[2]")
+```
+
+#### Find all book elements and only get `id` attribute self. (New Feature)
+
+```go
+list := xmlquery.Find(doc,"//book/@id")
+```
+
+#### Find all books with id is bk104.
+
+```go
+list := xmlquery.Find(doc, "//book[@id='bk104']")
+```
+
+#### Find all books that price less than 5.
+
+```go
+list := xmlquery.Find(doc, "//book[price<5]")
+```
+
+#### Evaluate the total price of all books.
+
+```go
+expr, err := xpath.Compile("sum(//book/price)")
+price := expr.Evaluate(xmlquery.CreateXPathNavigator(doc)).(float64)
+fmt.Printf("total price: %f\n", price)
+```
+
+#### Evaluate the number of all books element.
+
+```go
+expr, err := xpath.Compile("count(//book)")
+price := expr.Evaluate(xmlquery.CreateXPathNavigator(doc)).(float64)
+```
+
+#### Create XML document.
+
+```go
+doc := &xmlquery.Node{
+ Type: xmlquery.DeclarationNode,
+ Data: "xml",
+ Attr: []xml.Attr{
+ xml.Attr{Name: xml.Name{Local: "version"}, Value: "1.0"},
+ },
+}
+root := &xmlquery.Node{
+ Data: "rss",
+ Type: xmlquery.ElementNode,
+}
+doc.FirstChild = root
+channel := &xmlquery.Node{
+ Data: "channel",
+ Type: xmlquery.ElementNode,
+}
+root.FirstChild = channel
+title := &xmlquery.Node{
+ Data: "title",
+ Type: xmlquery.ElementNode,
+}
+title_text := &xmlquery.Node{
+ Data: "W3Schools Home Page",
+ Type: xmlquery.TextNode,
+}
+title.FirstChild = title_text
+channel.FirstChild = title
+fmt.Println(doc.OutputXML(true))
+// W3Schools Home Page
+```
+
+Quick Tutorial
+===
+
+```go
+func main(){
+ s := `
+
+
+ W3Schools Home Page
+ https://www.w3schools.com
+ Free web building tutorials
+
+ RSS Tutorial
+ https://www.w3schools.com/xml/xml_rss.asp
+ New RSS tutorial on W3Schools
+
+
+ XML Tutorial
+ https://www.w3schools.com/xml
+ New XML tutorial on W3Schools
+
+
+`
+
+ doc, err := Parse(strings.NewReader(s))
+ if err != nil {
+ panic(err)
+ }
+ channel := FindOne(doc, "//channel")
+ fmt.Printf("title: %s\n", channel.SelectElement("title").InnerText())
+ fmt.Printf("link: %s\n", channel.SelectElement("link").InnerText())
+ for i, n := range Find(doc, "//item") {
+ fmt.Printf("#%d %s\n", i, n.SelectElement("title"))
+ }
+}
+```
+
+List of supported XPath query packages
+===
+|Name |Description |
+|--------------------------|----------------|
+|[htmlquery](https://github.com/antchfx/htmlquery) | XPath query package for the HTML document|
+|[xmlquery](https://github.com/antchfx/xmlquery) | XPath query package for the XML document|
+|[jsonquery](https://github.com/antchfx/jsonquery) | XPath query package for the JSON document|
+
+Questions
+===
+Please let me know if you have any questions.
diff --git a/vendor/github.com/antchfx/xmlquery/books.xml b/vendor/github.com/antchfx/xmlquery/books.xml
new file mode 100644
index 0000000..85a74b5
--- /dev/null
+++ b/vendor/github.com/antchfx/xmlquery/books.xml
@@ -0,0 +1,121 @@
+
+
+
+
+ Gambardella, Matthew
+ XML Developer's Guide
+ Computer
+ 44.95
+ 2000-10-01
+ An in-depth look at creating applications
+ with XML.
+
+
+ Ralls, Kim
+ Midnight Rain
+ Fantasy
+ 5.95
+ 2000-12-16
+ A former architect battles corporate zombies,
+ an evil sorceress, and her own childhood to become queen
+ of the world.
+
+
+ Corets, Eva
+ Maeve Ascendant
+ Fantasy
+ 5.95
+ 2000-11-17
+ After the collapse of a nanotechnology
+ society in England, the young survivors lay the
+ foundation for a new society.
+
+
+ Corets, Eva
+ Oberon's Legacy
+ Fantasy
+ 5.95
+ 2001-03-10
+ In post-apocalypse England, the mysterious
+ agent known only as Oberon helps to create a new life
+ for the inhabitants of London. Sequel to Maeve
+ Ascendant.
+
+
+ Corets, Eva
+ The Sundered Grail
+ Fantasy
+ 5.95
+ 2001-09-10
+ The two daughters of Maeve, half-sisters,
+ battle one another for control of England. Sequel to
+ Oberon's Legacy.
+
+
+ Randall, Cynthia
+ Lover Birds
+ Romance
+ 4.95
+ 2000-09-02
+ When Carla meets Paul at an ornithology
+ conference, tempers fly as feathers get ruffled.
+
+
+ Thurman, Paula
+ Splish Splash
+ Romance
+ 4.95
+ 2000-11-02
+ A deep sea diver finds true love twenty
+ thousand leagues beneath the sea.
+
+
+ Knorr, Stefan
+ Creepy Crawlies
+ Horror
+ 4.95
+ 2000-12-06
+ An anthology of horror stories about roaches,
+ centipedes, scorpions and other insects.
+
+
+ Kress, Peter
+ Paradox Lost
+ Science Fiction
+ 6.95
+ 2000-11-02
+ After an inadvertant trip through a Heisenberg
+ Uncertainty Device, James Salway discovers the problems
+ of being quantum.
+
+
+ O'Brien, Tim
+ Microsoft .NET: The Programming Bible
+ Computer
+ 36.95
+ 2000-12-09
+ Microsoft's .NET initiative is explored in
+ detail in this deep programmer's reference.
+
+
+ O'Brien, Tim
+ MSXML3: A Comprehensive Guide
+ Computer
+ 36.95
+ 2000-12-01
+ The Microsoft MSXML3 parser is covered in
+ detail, with attention to XML DOM interfaces, XSLT processing,
+ SAX and more.
+
+
+ Galos, Mike
+ Visual Studio 7: A Comprehensive Guide
+ Computer
+ 49.95
+ 2001-04-16
+ Microsoft Visual Studio 7 is explored in depth,
+ looking at how Visual Basic, Visual C++, C#, and ASP+ are
+ integrated into a comprehensive development
+ environment.
+
+
\ No newline at end of file
diff --git a/vendor/github.com/antchfx/xmlquery/node.go b/vendor/github.com/antchfx/xmlquery/node.go
new file mode 100644
index 0000000..40b065b
--- /dev/null
+++ b/vendor/github.com/antchfx/xmlquery/node.go
@@ -0,0 +1,281 @@
+package xmlquery
+
+import (
+ "bytes"
+ "encoding/xml"
+ "errors"
+ "fmt"
+ "io"
+ "net/http"
+ "strings"
+
+ "golang.org/x/net/html/charset"
+)
+
+// A NodeType is the type of a Node.
+type NodeType uint
+
+const (
+ // DocumentNode is a document object that, as the root of the document tree,
+ // provides access to the entire XML document.
+ DocumentNode NodeType = iota
+ // DeclarationNode is the document type declaration, indicated by the following
+ // tag (for example, ).
+ DeclarationNode
+ // ElementNode is an element (for example, ).
+ ElementNode
+ // TextNode is the text content of a node.
+ TextNode
+ // CommentNode a comment (for example, ).
+ CommentNode
+ // AttributeNode is an attribute of element.
+ AttributeNode
+)
+
+// A Node consists of a NodeType and some Data (tag name for
+// element nodes, content for text) and are part of a tree of Nodes.
+type Node struct {
+ Parent, FirstChild, LastChild, PrevSibling, NextSibling *Node
+
+ Type NodeType
+ Data string
+ Prefix string
+ NamespaceURI string
+ Attr []xml.Attr
+
+ level int // node level in the tree
+}
+
+// InnerText returns the text between the start and end tags of the object.
+func (n *Node) InnerText() string {
+ var output func(*bytes.Buffer, *Node)
+ output = func(buf *bytes.Buffer, n *Node) {
+ switch n.Type {
+ case TextNode:
+ buf.WriteString(n.Data)
+ return
+ case CommentNode:
+ return
+ }
+ for child := n.FirstChild; child != nil; child = child.NextSibling {
+ output(buf, child)
+ }
+ }
+
+ var buf bytes.Buffer
+ output(&buf, n)
+ return buf.String()
+}
+
+func outputXML(buf *bytes.Buffer, n *Node) {
+ if n.Type == TextNode || n.Type == CommentNode {
+ xml.EscapeText(buf, []byte(strings.TrimSpace(n.Data)))
+ return
+ }
+ if n.Type == DeclarationNode {
+ buf.WriteString("" + n.Data)
+ } else {
+ if n.Prefix == "" {
+ buf.WriteString("<" + n.Data)
+ }else{
+ buf.WriteString("<" + n.Prefix + ":" + n.Data)
+ }
+ }
+
+ for _, attr := range n.Attr {
+ if attr.Name.Space != "" {
+ buf.WriteString(fmt.Sprintf(` %s:%s="%s"`, attr.Name.Space, attr.Name.Local, attr.Value))
+ } else {
+ buf.WriteString(fmt.Sprintf(` %s="%s"`, attr.Name.Local, attr.Value))
+ }
+ }
+ if n.Type == DeclarationNode {
+ buf.WriteString("?>")
+ } else {
+ buf.WriteString(">")
+ }
+ for child := n.FirstChild; child != nil; child = child.NextSibling {
+ outputXML(buf, child)
+ }
+ if n.Type != DeclarationNode {
+ if n.Prefix == "" {
+ buf.WriteString(fmt.Sprintf("%s>", n.Data))
+ }else{
+ buf.WriteString(fmt.Sprintf("%s:%s>", n.Prefix, n.Data))
+ }
+ }
+}
+
+// OutputXML returns the XML text including the tag names.
+func (n *Node) OutputXML(self bool) string {
+ var buf bytes.Buffer
+ if self {
+ outputXML(&buf, n)
+ } else {
+ for n := n.FirstChild; n != nil; n = n.NextSibling {
+ outputXML(&buf, n)
+ }
+ }
+
+ return buf.String()
+}
+
+func addAttr(n *Node, key, val string) {
+ var attr xml.Attr
+ if i := strings.Index(key, ":"); i > 0 {
+ attr = xml.Attr{
+ Name: xml.Name{Space: key[:i], Local: key[i+1:]},
+ Value: val,
+ }
+ } else {
+ attr = xml.Attr{
+ Name: xml.Name{Local: key},
+ Value: val,
+ }
+ }
+
+ n.Attr = append(n.Attr, attr)
+}
+
+func addChild(parent, n *Node) {
+ n.Parent = parent
+ if parent.FirstChild == nil {
+ parent.FirstChild = n
+ } else {
+ parent.LastChild.NextSibling = n
+ n.PrevSibling = parent.LastChild
+ }
+
+ parent.LastChild = n
+}
+
+func addSibling(sibling, n *Node) {
+ for t := sibling.NextSibling; t != nil; t = t.NextSibling {
+ sibling = t
+ }
+ n.Parent = sibling.Parent
+ sibling.NextSibling = n
+ n.PrevSibling = sibling
+ if sibling.Parent != nil {
+ sibling.Parent.LastChild = n
+ }
+}
+
+// LoadURL loads the XML document from the specified URL.
+func LoadURL(url string) (*Node, error) {
+ resp, err := http.Get(url)
+ if err != nil {
+ return nil, err
+ }
+ defer resp.Body.Close()
+ return parse(resp.Body)
+}
+
+func parse(r io.Reader) (*Node, error) {
+ var (
+ decoder = xml.NewDecoder(r)
+ doc = &Node{Type: DocumentNode}
+ space2prefix = make(map[string]string)
+ level = 0
+ )
+ decoder.CharsetReader = charset.NewReaderLabel
+ prev := doc
+ for {
+ tok, err := decoder.Token()
+ switch {
+ case err == io.EOF:
+ goto quit
+ case err != nil:
+ return nil, err
+ }
+
+ switch tok := tok.(type) {
+ case xml.StartElement:
+ if level == 0 {
+ // missing XML declaration
+ node := &Node{Type: DeclarationNode, Data: "xml", level: 1}
+ addChild(prev, node)
+ level = 1
+ prev = node
+ }
+ // https://www.w3.org/TR/xml-names/#scoping-defaulting
+ for _, att := range tok.Attr {
+ if att.Name.Local == "xmlns" {
+ space2prefix[att.Value] = ""
+ } else if att.Name.Space == "xmlns" {
+ space2prefix[att.Value] = att.Name.Local
+ }
+ }
+
+ if tok.Name.Space != "" {
+ if _, found := space2prefix[tok.Name.Space]; !found {
+ return nil, errors.New("xmlquery: invalid XML document, namespace is missing")
+ }
+ }
+ node := &Node{
+ Type: ElementNode,
+ Data: tok.Name.Local,
+ Prefix: space2prefix[tok.Name.Space],
+ NamespaceURI: tok.Name.Space,
+ Attr: tok.Attr,
+ level: level,
+ }
+ //fmt.Println(fmt.Sprintf("start > %s : %d", node.Data, level))
+ if level == prev.level {
+ addSibling(prev, node)
+ } else if level > prev.level {
+ addChild(prev, node)
+ } else if level < prev.level {
+ for i := prev.level - level; i > 1; i-- {
+ prev = prev.Parent
+ }
+ addSibling(prev.Parent, node)
+ }
+ prev = node
+ level++
+ case xml.EndElement:
+ level--
+ case xml.CharData:
+ node := &Node{Type: TextNode, Data: string(tok), level: level}
+ if level == prev.level {
+ addSibling(prev, node)
+ } else if level > prev.level {
+ addChild(prev, node)
+ }
+ case xml.Comment:
+ node := &Node{Type: CommentNode, Data: string(tok), level: level}
+ if level == prev.level {
+ addSibling(prev, node)
+ } else if level > prev.level {
+ addChild(prev, node)
+ }
+ case xml.ProcInst: // Processing Instruction
+ if prev.Type != DeclarationNode {
+ level++
+ }
+ node := &Node{Type: DeclarationNode, Data: tok.Target, level: level}
+ pairs := strings.Split(string(tok.Inst), " ")
+ for _, pair := range pairs {
+ pair = strings.TrimSpace(pair)
+ if i := strings.Index(pair, "="); i > 0 {
+ addAttr(node, pair[:i], strings.Trim(pair[i+1:], `"`))
+ }
+ }
+ if level == prev.level {
+ addSibling(prev, node)
+ } else if level > prev.level {
+ addChild(prev, node)
+ }
+ prev = node
+ case xml.Directive:
+ }
+
+ }
+quit:
+ return doc, nil
+}
+
+// Parse returns the parse tree for the XML from the given Reader.
+func Parse(r io.Reader) (*Node, error) {
+ return parse(r)
+}
diff --git a/vendor/github.com/antchfx/xmlquery/node_test.go b/vendor/github.com/antchfx/xmlquery/node_test.go
new file mode 100644
index 0000000..524b532
--- /dev/null
+++ b/vendor/github.com/antchfx/xmlquery/node_test.go
@@ -0,0 +1,308 @@
+package xmlquery
+
+import (
+ "net/http"
+ "net/http/httptest"
+ "strings"
+ "testing"
+)
+
+func findNode(root *Node, name string) *Node {
+ node := root.FirstChild
+ for {
+ if node == nil || node.Data == name {
+ break
+ }
+ node = node.NextSibling
+ }
+ return node
+}
+
+func childNodes(root *Node, name string) []*Node {
+ var list []*Node
+ node := root.FirstChild
+ for {
+ if node == nil {
+ break
+ }
+ if node.Data == name {
+ list = append(list, node)
+ }
+ node = node.NextSibling
+ }
+ return list
+}
+
+func testNode(t *testing.T, n *Node, expected string) {
+ if n.Data != expected {
+ t.Fatalf("expected node name is %s,but got %s", expected, n.Data)
+ }
+}
+
+func testAttr(t *testing.T, n *Node, name, expected string) {
+ for _, attr := range n.Attr {
+ if attr.Name.Local == name && attr.Value == expected {
+ return
+ }
+ }
+ t.Fatalf("not found attribute %s in the node %s", name, n.Data)
+}
+
+func testValue(t *testing.T, val, expected string) {
+ if val != expected {
+ t.Fatalf("expected value is %s,but got %s", expected, val)
+ }
+}
+
+func TestLoadURL(t *testing.T) {
+ server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ s := `
+
+
+ `
+ w.Header().Set("Content-Type", "text/xml")
+ w.Write([]byte(s))
+ }))
+ defer server.Close()
+ _, err := LoadURL(server.URL)
+ if err != nil {
+ t.Fatal(err)
+ }
+}
+
+func TestNamespaceURL(t *testing.T) {
+ s := `
+
+
+
+
+21|22021348
+
+ `
+ doc, err := Parse(strings.NewReader(s))
+ if err != nil {
+ t.Fatal(err)
+ }
+ top := FindOne(doc, "//rss")
+ if top == nil {
+ t.Fatal("rss feed invalid")
+ }
+ node := FindOne(top, "dc:creator")
+ if node.Prefix != "dc" {
+ t.Fatalf("expected node prefix name is dc but is=%s", node.Prefix)
+ }
+ if node.NamespaceURI != "https://purl.org/dc/elements/1.1/" {
+ t.Fatalf("dc:creator != %s", node.NamespaceURI)
+ }
+ if strings.Index(top.InnerText(), "author") > 0 {
+ t.Fatalf("InnerText() include comment node text")
+ }
+ if strings.Index(top.OutputXML(true), "author") == -1 {
+ t.Fatal("OutputXML should include comment node, but does not")
+ }
+}
+
+func TestMultipleProcInst(t *testing.T) {
+ s := `
+
+
+
+
+ `
+ doc, err := Parse(strings.NewReader(s))
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ node := doc.FirstChild //
+ if node.Data != "xml" {
+ t.Fatal("node.Data != xml")
+ }
+ node = node.NextSibling // New Line
+ node = node.NextSibling //
+ if node.Data != "xml-stylesheet" {
+ t.Fatal("node.Data != xml-stylesheet")
+ }
+}
+
+func TestParse(t *testing.T) {
+ s := `
+
+
+ Harry Potter
+ 29.99
+
+
+ Learning XML
+ 39.95
+
+`
+ root, err := Parse(strings.NewReader(s))
+ if err != nil {
+ t.Error(err)
+ }
+ if root.Type != DocumentNode {
+ t.Fatal("top node of tree is not DocumentNode")
+ }
+
+ declarNode := root.FirstChild
+ if declarNode.Type != DeclarationNode {
+ t.Fatal("first child node of tree is not DeclarationNode")
+ }
+
+ if declarNode.Attr[0].Name.Local != "version" && declarNode.Attr[0].Value != "1.0" {
+ t.Fatal("version attribute not expected")
+ }
+
+ bookstore := root.LastChild
+ if bookstore.Data != "bookstore" {
+ t.Fatal("bookstore elem not found")
+ }
+ if bookstore.FirstChild.Data != "\n" {
+ t.Fatal("first child node of bookstore is not empty node(\n)")
+ }
+ books := childNodes(bookstore, "book")
+ if len(books) != 2 {
+ t.Fatalf("expected book element count is 2, but got %d", len(books))
+ }
+ // first book element
+ testNode(t, findNode(books[0], "title"), "title")
+ testAttr(t, findNode(books[0], "title"), "lang", "en")
+ testValue(t, findNode(books[0], "price").InnerText(), "29.99")
+ testValue(t, findNode(books[0], "title").InnerText(), "Harry Potter")
+
+ // second book element
+ testNode(t, findNode(books[1], "title"), "title")
+ testAttr(t, findNode(books[1], "title"), "lang", "en")
+ testValue(t, findNode(books[1], "price").InnerText(), "39.95")
+
+ testValue(t, books[0].OutputXML(true), `Harry Potter29.99`)
+}
+
+func TestMissDeclaration(t *testing.T) {
+ s := `
+
+
+ `
+ doc, err := Parse(strings.NewReader(s))
+ if err != nil {
+ t.Fatal(err)
+ }
+ node := FindOne(doc, "//AAA")
+ if node == nil {
+ t.Fatal("//AAA is nil")
+ }
+}
+
+func TestMissingNamespace(t *testing.T) {
+ s := `
+ value 1
+ value 2
+ `
+ _, err := Parse(strings.NewReader(s))
+ if err == nil {
+ t.Fatal("err is nil, want got invalid XML document")
+ }
+}
+
+func TestTooNested(t *testing.T) {
+ s := `
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ `
+ root, err := Parse(strings.NewReader(s))
+ if err != nil {
+ t.Error(err)
+ }
+ aaa := findNode(root, "AAA")
+ if aaa == nil {
+ t.Fatal("AAA node not exists")
+ }
+ ccc := aaa.LastChild
+ if ccc.Data != "CCC" {
+ t.Fatalf("expected node is CCC,but got %s", ccc.Data)
+ }
+ bbb := ccc.PrevSibling
+ if bbb.Data != "BBB" {
+ t.Fatalf("expected node is bbb,but got %s", bbb.Data)
+ }
+ ddd := findNode(bbb, "DDD")
+ testNode(t, ddd, "DDD")
+ testNode(t, ddd.LastChild, "CCC")
+}
+
+func TestSelectElement(t *testing.T) {
+ s := `
+
+
+
+
+
+
+
+
+ `
+ root, err := Parse(strings.NewReader(s))
+ if err != nil {
+ t.Error(err)
+ }
+ version := root.FirstChild.SelectAttr("version")
+ if version != "1.0" {
+ t.Fatal("version!=1.0")
+ }
+ aaa := findNode(root, "AAA")
+ var n *Node
+ n = aaa.SelectElement("BBB")
+ if n == nil {
+ t.Fatalf("n is nil")
+ }
+ n = aaa.SelectElement("CCC")
+ if n == nil {
+ t.Fatalf("n is nil")
+ }
+
+ var ns []*Node
+ ns = aaa.SelectElements("CCC")
+ if len(ns) != 2 {
+ t.Fatalf("len(ns)!=2")
+ }
+}
+
+func TestEscapeOutputValue(t *testing.T) {
+ data := `<*>`
+
+ root, err := Parse(strings.NewReader(data))
+ if err != nil {
+ t.Error(err)
+ }
+
+ escapedInnerText := root.OutputXML(true)
+ if !strings.Contains(escapedInnerText, "<*>") {
+ t.Fatal("Inner Text has not been escaped")
+ }
+
+}
+func TestOutputXMLWithNamespacePrefix(t *testing.T) {
+ s := ``
+ doc, _ := Parse(strings.NewReader(s))
+ if s != doc.OutputXML(false) {
+ t.Fatal("xml document missing some characters")
+ }
+}
diff --git a/vendor/github.com/antchfx/xmlquery/query.go b/vendor/github.com/antchfx/xmlquery/query.go
new file mode 100644
index 0000000..bc24516
--- /dev/null
+++ b/vendor/github.com/antchfx/xmlquery/query.go
@@ -0,0 +1,256 @@
+/*
+Package xmlquery provides functions to extract data from XML documents using XPath expressions.
+*/
+package xmlquery
+
+import (
+ "fmt"
+ "strings"
+
+ "github.com/antchfx/xpath"
+)
+
+// SelectElements finds child elements with the specified name.
+func (n *Node) SelectElements(name string) []*Node {
+ return Find(n, name)
+}
+
+// SelectElement finds the first child element with the specified name.
+func (n *Node) SelectElement(name string) *Node {
+ return FindOne(n, name)
+}
+
+// SelectAttr returns the attribute value with the specified name.
+func (n *Node) SelectAttr(name string) string {
+ if n.Type == AttributeNode {
+ if n.Data == name {
+ return n.InnerText()
+ }
+ return ""
+ }
+ var local, space string
+ local = name
+ if i := strings.Index(name, ":"); i > 0 {
+ space = name[:i]
+ local = name[i+1:]
+ }
+ for _, attr := range n.Attr {
+ if attr.Name.Local == local && attr.Name.Space == space {
+ return attr.Value
+ }
+ }
+ return ""
+}
+
+var _ xpath.NodeNavigator = &NodeNavigator{}
+
+// CreateXPathNavigator creates a new xpath.NodeNavigator for the specified XML Node.
+func CreateXPathNavigator(top *Node) *NodeNavigator {
+ return &NodeNavigator{curr: top, root: top, attr: -1}
+}
+
+func getCurrentNode(it *xpath.NodeIterator) *Node {
+ n := it.Current().(*NodeNavigator)
+ if n.NodeType() == xpath.AttributeNode {
+ childNode := &Node{
+ Type: TextNode,
+ Data: n.Value(),
+ }
+ return &Node{
+ Type: AttributeNode,
+ Data: n.LocalName(),
+ FirstChild: childNode,
+ LastChild: childNode,
+ }
+ }
+ return n.curr
+}
+
+// Find searches the Node that matches by the specified XPath expr.
+func Find(top *Node, expr string) []*Node {
+ exp, err := xpath.Compile(expr)
+ if err != nil {
+ panic(err)
+ }
+ t := exp.Select(CreateXPathNavigator(top))
+ var elems []*Node
+ for t.MoveNext() {
+ elems = append(elems, getCurrentNode(t))
+ }
+ return elems
+}
+
+// FindOne searches the Node that matches by the specified XPath expr,
+// and returns first element of matched.
+func FindOne(top *Node, expr string) *Node {
+ exp, err := xpath.Compile(expr)
+ if err != nil {
+ panic(err)
+ }
+ t := exp.Select(CreateXPathNavigator(top))
+ var elem *Node
+ if t.MoveNext() {
+ elem = getCurrentNode(t)
+ }
+ return elem
+}
+
+// FindEach searches for nodes matching the specified XPath expr and calls the callback cb for each matched node.
+func FindEach(top *Node, expr string, cb func(int, *Node)) {
+ exp, err := xpath.Compile(expr)
+ if err != nil {
+ panic(err)
+ }
+ t := exp.Select(CreateXPathNavigator(top))
+ var i int
+ for t.MoveNext() {
+ cb(i, getCurrentNode(t))
+ i++
+ }
+}
+
+type NodeNavigator struct {
+ root, curr *Node
+ attr int
+}
+
+func (x *NodeNavigator) Current() *Node {
+ return x.curr
+}
+
+func (x *NodeNavigator) NodeType() xpath.NodeType {
+ switch x.curr.Type {
+ case CommentNode:
+ return xpath.CommentNode
+ case TextNode:
+ return xpath.TextNode
+ case DeclarationNode, DocumentNode:
+ return xpath.RootNode
+ case ElementNode:
+ if x.attr != -1 {
+ return xpath.AttributeNode
+ }
+ return xpath.ElementNode
+ }
+ panic(fmt.Sprintf("unknown XML node type: %v", x.curr.Type))
+}
+
+func (x *NodeNavigator) LocalName() string {
+ if x.attr != -1 {
+ return x.curr.Attr[x.attr].Name.Local
+ }
+ return x.curr.Data
+
+}
+
+func (x *NodeNavigator) Prefix() string {
+ if x.NodeType() == xpath.AttributeNode {
+ return ""
+ }
+ return x.curr.Prefix
+}
+
+func (x *NodeNavigator) Value() string {
+ switch x.curr.Type {
+ case CommentNode:
+ return x.curr.Data
+ case ElementNode:
+ if x.attr != -1 {
+ return x.curr.Attr[x.attr].Value
+ }
+ return x.curr.InnerText()
+ case TextNode:
+ return x.curr.Data
+ }
+ return ""
+}
+
+func (x *NodeNavigator) Copy() xpath.NodeNavigator {
+ n := *x
+ return &n
+}
+
+func (x *NodeNavigator) MoveToRoot() {
+ x.curr = x.root
+}
+
+func (x *NodeNavigator) MoveToParent() bool {
+ if x.attr != -1 {
+ x.attr = -1
+ return true
+ } else if node := x.curr.Parent; node != nil {
+ x.curr = node
+ return true
+ }
+ return false
+}
+
+func (x *NodeNavigator) MoveToNextAttribute() bool {
+ if x.attr >= len(x.curr.Attr)-1 {
+ return false
+ }
+ x.attr++
+ return true
+}
+
+func (x *NodeNavigator) MoveToChild() bool {
+ if x.attr != -1 {
+ return false
+ }
+ if node := x.curr.FirstChild; node != nil {
+ x.curr = node
+ return true
+ }
+ return false
+}
+
+func (x *NodeNavigator) MoveToFirst() bool {
+ if x.attr != -1 || x.curr.PrevSibling == nil {
+ return false
+ }
+ for {
+ node := x.curr.PrevSibling
+ if node == nil {
+ break
+ }
+ x.curr = node
+ }
+ return true
+}
+
+func (x *NodeNavigator) String() string {
+ return x.Value()
+}
+
+func (x *NodeNavigator) MoveToNext() bool {
+ if x.attr != -1 {
+ return false
+ }
+ if node := x.curr.NextSibling; node != nil {
+ x.curr = node
+ return true
+ }
+ return false
+}
+
+func (x *NodeNavigator) MoveToPrevious() bool {
+ if x.attr != -1 {
+ return false
+ }
+ if node := x.curr.PrevSibling; node != nil {
+ x.curr = node
+ return true
+ }
+ return false
+}
+
+func (x *NodeNavigator) MoveTo(other xpath.NodeNavigator) bool {
+ node, ok := other.(*NodeNavigator)
+ if !ok || node.root != x.root {
+ return false
+ }
+
+ x.curr = node.curr
+ x.attr = node.attr
+ return true
+}
diff --git a/vendor/github.com/antchfx/xmlquery/query_test.go b/vendor/github.com/antchfx/xmlquery/query_test.go
new file mode 100644
index 0000000..fe24c89
--- /dev/null
+++ b/vendor/github.com/antchfx/xmlquery/query_test.go
@@ -0,0 +1,115 @@
+package xmlquery
+
+import (
+ "strings"
+ "testing"
+)
+
+// https://msdn.microsoft.com/en-us/library/ms762271(v=vs.85).aspx
+const xmlDoc = `
+
+
+
+
+ Gambardella, Matthew
+ XML Developer's Guide
+ Computer
+ 44.95
+ 2000-10-01
+ An in-depth look at creating applications
+ with XML.
+
+
+ Ralls, Kim
+ Midnight Rain
+ Fantasy
+ 5.95
+ 2000-12-16
+ A former architect battles corporate zombies,
+ an evil sorceress, and her own childhood to become queen
+ of the world.
+
+
+ Corets, Eva
+ Maeve Ascendant
+ Fantasy
+ 5.95
+ 2000-11-17
+ After the collapse of a nanotechnology
+ society in England, the young survivors lay the
+ foundation for a new society.
+
+`
+
+var doc = loadXML(xmlDoc)
+
+func TestXPath(t *testing.T) {
+ if list := Find(doc, "//book"); len(list) != 3 {
+ t.Fatal("count(//book) != 3")
+ }
+ if node := FindOne(doc, "//book[@id='bk101']"); node == nil {
+ t.Fatal("//book[@id='bk101] is not found")
+ }
+ if node := FindOne(doc, "//book[price>=44.95]"); node == nil {
+ t.Fatal("//book/price>=44.95 is not found")
+ }
+ if list := Find(doc, "//book[genre='Fantasy']"); len(list) != 2 {
+ t.Fatal("//book[genre='Fantasy'] items count is not equal 2")
+ }
+ var c int
+ FindEach(doc, "//book", func(i int, n *Node) {
+ c++
+ })
+ if c != len(Find(doc, "//book")) {
+ t.Fatal("count(//book) != 3")
+ }
+ node := FindOne(doc, "//book[1]")
+ if node.SelectAttr("id") != "bk101" {
+ t.Fatal("//book[1]/@id != bk101")
+ }
+}
+
+func TestXPathCdUp(t *testing.T) {
+ doc := loadXML(``)
+ node := FindOne(doc, "/a/b/@attr/..")
+ t.Logf("node = %#v", node)
+ if node == nil || node.Data != "b" {
+ t.Fatal("//b/@id/.. != ")
+ }
+}
+
+func TestNavigator(t *testing.T) {
+ nav := &NodeNavigator{curr: doc, root: doc, attr: -1}
+ nav.MoveToChild() // New Line
+ nav.MoveToNext()
+ nav.MoveToNext() // catalog
+ if nav.curr.Data != "catalog" {
+ t.Fatal("current node name != `catalog`")
+ }
+ nav.MoveToChild() // New Line
+ nav.MoveToNext() // comment node
+ if nav.curr.Type != CommentNode {
+ t.Fatal("node type not CommentNode")
+ }
+ nav.Value()
+ nav.MoveToNext() // New Line
+ nav.MoveToNext() //book
+ nav.MoveToChild()
+ nav.MoveToNext() // book/author
+ if nav.LocalName() != "author" {
+ t.Fatalf("node error")
+ }
+ nav.MoveToParent() // book
+ nav.MoveToNext() // next book
+ if nav.curr.SelectAttr("id") != "bk102" {
+ t.Fatal("node error")
+ }
+}
+
+func loadXML(s string) *Node {
+ node, err := Parse(strings.NewReader(s))
+ if err != nil {
+ panic(err)
+ }
+ return node
+}
diff --git a/vendor/github.com/antchfx/xpath/LICENSE b/vendor/github.com/antchfx/xpath/LICENSE
new file mode 100644
index 0000000..e14c371
--- /dev/null
+++ b/vendor/github.com/antchfx/xpath/LICENSE
@@ -0,0 +1,17 @@
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
\ No newline at end of file
diff --git a/vendor/github.com/antchfx/xpath/README.md b/vendor/github.com/antchfx/xpath/README.md
new file mode 100644
index 0000000..e5c399a
--- /dev/null
+++ b/vendor/github.com/antchfx/xpath/README.md
@@ -0,0 +1,155 @@
+XPath
+====
+[![GoDoc](https://godoc.org/github.com/antchfx/xpath?status.svg)](https://godoc.org/github.com/antchfx/xpath)
+[![Coverage Status](https://coveralls.io/repos/github/antchfx/xpath/badge.svg?branch=master)](https://coveralls.io/github/antchfx/xpath?branch=master)
+[![Build Status](https://travis-ci.org/antchfx/xpath.svg?branch=master)](https://travis-ci.org/antchfx/xpath)
+[![Go Report Card](https://goreportcard.com/badge/github.com/antchfx/xpath)](https://goreportcard.com/report/github.com/antchfx/xpath)
+
+XPath is a Go package that provides selecting nodes from XML, HTML or other documents using XPath expressions.
+
+Implementation
+===
+
+- [htmlquery](https://github.com/antchfx/htmlquery) - an XPath query package for HTML document
+
+- [xmlquery](https://github.com/antchfx/xmlquery) - an XPath query package for XML document.
+
+- [jsonquery](https://github.com/antchfx/jsonquery) - an XPath query package for JSON document
+
+Supported Features
+===
+
+#### The basic XPath patterns.
+
+> The basic XPath patterns cover 90% of the cases that most stylesheets will need.
+
+- `node` : Selects all child elements with nodeName of node.
+
+- `*` : Selects all child elements.
+
+- `@attr` : Selects the attribute attr.
+
+- `@*` : Selects all attributes.
+
+- `node()` : Matches an org.w3c.dom.Node.
+
+- `text()` : Matches an org.w3c.dom.Text node.
+
+- `comment()` : Matches a comment.
+
+- `.` : Selects the current node.
+
+- `..` : Selects the parent of current node.
+
+- `/` : Selects the document node.
+
+- `a[expr]` : Select only those nodes matching a which also satisfy the expression expr.
+
+- `a[n]` : Selects the nth node matching a. When a filter's expression is a number, XPath selects based on position.
+
+- `a/b` : For each node matching a, add the nodes matching b to the result.
+
+- `a//b` : For each node matching a, add the descendant nodes matching b to the result.
+
+- `//b` : Returns elements in the entire document matching b.
+
+- `a|b` : All nodes matching a or b, union operation(not boolean or).
+
+#### Node Axes
+
+- `child::*` : The child axis selects children of the current node.
+
+- `descendant::*` : The descendant axis selects descendants of the current node. It is equivalent to '//'.
+
+- `descendant-or-self::*` : Selects descendants including the current node.
+
+- `attribute::*` : Selects attributes of the current element. It is equivalent to @*
+
+- `following-sibling::*` : Selects nodes after the current node.
+
+- `preceding-sibling::*` : Selects nodes before the current node.
+
+- `following::*` : Selects the first matching node following in document order, excluding descendants.
+
+- `preceding::*` : Selects the first matching node preceding in document order, excluding ancestors.
+
+- `parent::*` : Selects the parent if it matches. The '..' pattern from the core is equivalent to 'parent::node()'.
+
+- `ancestor::*` : Selects matching ancestors.
+
+- `ancestor-or-self::*` : Selects ancestors including the current node.
+
+- `self::*` : Selects the current node. '.' is equivalent to 'self::node()'.
+
+#### Expressions
+
+ The package supports three data types: number, boolean, and string.
+
+- `path` : Selects nodes based on the path.
+
+- `a = b` : Standard comparisons.
+
+ * a = b True if a equals b.
+ * a != b True if a is not equal to b.
+ * a < b True if a is less than b.
+ * a <= b True if a is less than or equal to b.
+ * a > b True if a is greater than b.
+ * a >= b True if a is greater than or equal to b.
+
+- `a + b` : Arithmetic expressions.
+
+ * `- a` Unary minus
+ * a + b Add
+ * a - b Subtract
+ * a * b Multiply
+ * a div b Divide
+ * a mod b Floating point mod, like Java.
+
+- `a or b` : Boolean `or` operation.
+
+- `a and b` : Boolean `and` operation.
+
+- `(expr)` : Parenthesized expressions.
+
+- `fun(arg1, ..., argn)` : Function calls:
+
+| Function | Supported |
+| --- | --- |
+`boolean()`| ✓ |
+`ceiling()`| ✓ |
+`choose()`| ✗ |
+`concat()`| ✓ |
+`contains()`| ✓ |
+`count()`| ✓ |
+`current()`| ✗ |
+`document()`| ✗ |
+`element-available()`| ✗ |
+`ends-with()`| ✓ |
+`false()`| ✓ |
+`floor()`| ✓ |
+`format-number()`| ✗ |
+`function-available()`| ✗ |
+`generate-id()`| ✗ |
+`id()`| ✗ |
+`key()`| ✗ |
+`lang()`| ✗ |
+`last()`| ✓ |
+`local-name()`| ✓ |
+`name()`| ✓ |
+`namespace-uri()`| ✓ |
+`normalize-space()`| ✓ |
+`not()`| ✓ |
+`number()`| ✓ |
+`position()`| ✓ |
+`round()`| ✓ |
+`starts-with()`| ✓ |
+`string()`| ✓ |
+`string-length()`| ✓ |
+`substring()`| ✓ |
+`substring-after()`| ✓ |
+`substring-before()`| ✓ |
+`sum()`| ✓ |
+`system-property()`| ✗ |
+`translate()`| ✓ |
+`true()`| ✓ |
+`unparsed-entity-url()` | ✗ |
\ No newline at end of file
diff --git a/vendor/github.com/antchfx/xpath/build.go b/vendor/github.com/antchfx/xpath/build.go
new file mode 100644
index 0000000..74f266b
--- /dev/null
+++ b/vendor/github.com/antchfx/xpath/build.go
@@ -0,0 +1,483 @@
+package xpath
+
+import (
+ "errors"
+ "fmt"
+)
+
+type flag int
+
+const (
+ noneFlag flag = iota
+ filterFlag
+)
+
+// builder provides building an XPath expressions.
+type builder struct {
+ depth int
+ flag flag
+ firstInput query
+}
+
+// axisPredicate creates a predicate to predicating for this axis node.
+func axisPredicate(root *axisNode) func(NodeNavigator) bool {
+ // get the current axis node type.
+ typ := ElementNode
+ switch root.AxeType {
+ case "attribute":
+ typ = AttributeNode
+ case "self", "parent":
+ typ = allNode
+ default:
+ switch root.Prop {
+ case "comment":
+ typ = CommentNode
+ case "text":
+ typ = TextNode
+ // case "processing-instruction":
+ // typ = ProcessingInstructionNode
+ case "node":
+ typ = allNode
+ }
+ }
+ nametest := root.LocalName != "" || root.Prefix != ""
+ predicate := func(n NodeNavigator) bool {
+ if typ == n.NodeType() || typ == allNode || typ == TextNode {
+ if nametest {
+ if root.LocalName == n.LocalName() && root.Prefix == n.Prefix() {
+ return true
+ }
+ } else {
+ return true
+ }
+ }
+ return false
+ }
+
+ return predicate
+}
+
+// processAxisNode processes a query for the XPath axis node.
+func (b *builder) processAxisNode(root *axisNode) (query, error) {
+ var (
+ err error
+ qyInput query
+ qyOutput query
+ predicate = axisPredicate(root)
+ )
+
+ if root.Input == nil {
+ qyInput = &contextQuery{}
+ } else {
+ if root.AxeType == "child" && (root.Input.Type() == nodeAxis) {
+ if input := root.Input.(*axisNode); input.AxeType == "descendant-or-self" {
+ var qyGrandInput query
+ if input.Input != nil {
+ qyGrandInput, _ = b.processNode(input.Input)
+ } else {
+ qyGrandInput = &contextQuery{}
+ }
+ qyOutput = &descendantQuery{Input: qyGrandInput, Predicate: predicate, Self: true}
+ return qyOutput, nil
+ }
+ }
+ qyInput, err = b.processNode(root.Input)
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ switch root.AxeType {
+ case "ancestor":
+ qyOutput = &ancestorQuery{Input: qyInput, Predicate: predicate}
+ case "ancestor-or-self":
+ qyOutput = &ancestorQuery{Input: qyInput, Predicate: predicate, Self: true}
+ case "attribute":
+ qyOutput = &attributeQuery{Input: qyInput, Predicate: predicate}
+ case "child":
+ filter := func(n NodeNavigator) bool {
+ v := predicate(n)
+ switch root.Prop {
+ case "text":
+ v = v && n.NodeType() == TextNode
+ case "node":
+ v = v && (n.NodeType() == ElementNode || n.NodeType() == TextNode)
+ case "comment":
+ v = v && n.NodeType() == CommentNode
+ }
+ return v
+ }
+ qyOutput = &childQuery{Input: qyInput, Predicate: filter}
+ case "descendant":
+ qyOutput = &descendantQuery{Input: qyInput, Predicate: predicate}
+ case "descendant-or-self":
+ qyOutput = &descendantQuery{Input: qyInput, Predicate: predicate, Self: true}
+ case "following":
+ qyOutput = &followingQuery{Input: qyInput, Predicate: predicate}
+ case "following-sibling":
+ qyOutput = &followingQuery{Input: qyInput, Predicate: predicate, Sibling: true}
+ case "parent":
+ qyOutput = &parentQuery{Input: qyInput, Predicate: predicate}
+ case "preceding":
+ qyOutput = &precedingQuery{Input: qyInput, Predicate: predicate}
+ case "preceding-sibling":
+ qyOutput = &precedingQuery{Input: qyInput, Predicate: predicate, Sibling: true}
+ case "self":
+ qyOutput = &selfQuery{Input: qyInput, Predicate: predicate}
+ case "namespace":
+ // TODO: the namespace axis is not yet supported.
+ default:
+ err = fmt.Errorf("unknown axe type: %s", root.AxeType)
+ return nil, err
+ }
+ return qyOutput, nil
+}
+
+// processFilterNode builds query for the XPath filter predicate.
+func (b *builder) processFilterNode(root *filterNode) (query, error) {
+ b.flag |= filterFlag
+
+ qyInput, err := b.processNode(root.Input)
+ if err != nil {
+ return nil, err
+ }
+ qyCond, err := b.processNode(root.Condition)
+ if err != nil {
+ return nil, err
+ }
+ qyOutput := &filterQuery{Input: qyInput, Predicate: qyCond}
+ return qyOutput, nil
+}
+
+// processFunctionNode processes query for the XPath function node.
+func (b *builder) processFunctionNode(root *functionNode) (query, error) {
+ var qyOutput query
+ switch root.FuncName {
+ case "starts-with":
+ arg1, err := b.processNode(root.Args[0])
+ if err != nil {
+ return nil, err
+ }
+ arg2, err := b.processNode(root.Args[1])
+ if err != nil {
+ return nil, err
+ }
+ qyOutput = &functionQuery{Input: b.firstInput, Func: startwithFunc(arg1, arg2)}
+ case "ends-with":
+ arg1, err := b.processNode(root.Args[0])
+ if err != nil {
+ return nil, err
+ }
+ arg2, err := b.processNode(root.Args[1])
+ if err != nil {
+ return nil, err
+ }
+ qyOutput = &functionQuery{Input: b.firstInput, Func: endwithFunc(arg1, arg2)}
+ case "contains":
+ arg1, err := b.processNode(root.Args[0])
+ if err != nil {
+ return nil, err
+ }
+ arg2, err := b.processNode(root.Args[1])
+ if err != nil {
+ return nil, err
+ }
+
+ qyOutput = &functionQuery{Input: b.firstInput, Func: containsFunc(arg1, arg2)}
+ case "substring":
+ //substring( string , start [, length] )
+ if len(root.Args) < 2 {
+ return nil, errors.New("xpath: substring function must have at least two parameter")
+ }
+ var (
+ arg1, arg2, arg3 query
+ err error
+ )
+ if arg1, err = b.processNode(root.Args[0]); err != nil {
+ return nil, err
+ }
+ if arg2, err = b.processNode(root.Args[1]); err != nil {
+ return nil, err
+ }
+ if len(root.Args) == 3 {
+ if arg3, err = b.processNode(root.Args[2]); err != nil {
+ return nil, err
+ }
+ }
+ qyOutput = &functionQuery{Input: b.firstInput, Func: substringFunc(arg1, arg2, arg3)}
+ case "substring-before", "substring-after":
+ //substring-xxxx( haystack, needle )
+ if len(root.Args) != 2 {
+ return nil, errors.New("xpath: substring-before function must have two parameters")
+ }
+ var (
+ arg1, arg2 query
+ err error
+ )
+ if arg1, err = b.processNode(root.Args[0]); err != nil {
+ return nil, err
+ }
+ if arg2, err = b.processNode(root.Args[1]); err != nil {
+ return nil, err
+ }
+ qyOutput = &functionQuery{
+ Input: b.firstInput,
+ Func: substringIndFunc(arg1, arg2, root.FuncName == "substring-after"),
+ }
+ case "string-length":
+ // string-length( [string] )
+ if len(root.Args) < 1 {
+ return nil, errors.New("xpath: string-length function must have at least one parameter")
+ }
+ arg1, err := b.processNode(root.Args[0])
+ if err != nil {
+ return nil, err
+ }
+ qyOutput = &functionQuery{Input: b.firstInput, Func: stringLengthFunc(arg1)}
+ case "normalize-space":
+ if len(root.Args) == 0 {
+ return nil, errors.New("xpath: normalize-space function must have at least one parameter")
+ }
+ argQuery, err := b.processNode(root.Args[0])
+ if err != nil {
+ return nil, err
+ }
+ qyOutput = &functionQuery{Input: argQuery, Func: normalizespaceFunc}
+ case "translate":
+ //translate( string , string, string )
+ if len(root.Args) != 3 {
+ return nil, errors.New("xpath: translate function must have three parameters")
+ }
+ var (
+ arg1, arg2, arg3 query
+ err error
+ )
+ if arg1, err = b.processNode(root.Args[0]); err != nil {
+ return nil, err
+ }
+ if arg2, err = b.processNode(root.Args[1]); err != nil {
+ return nil, err
+ }
+ if arg3, err = b.processNode(root.Args[2]); err != nil {
+ return nil, err
+ }
+ qyOutput = &functionQuery{Input: b.firstInput, Func: translateFunc(arg1, arg2, arg3)}
+ case "not":
+ if len(root.Args) == 0 {
+ return nil, errors.New("xpath: not function must have at least one parameter")
+ }
+ argQuery, err := b.processNode(root.Args[0])
+ if err != nil {
+ return nil, err
+ }
+ qyOutput = &functionQuery{Input: argQuery, Func: notFunc}
+ case "name", "local-name", "namespace-uri":
+ inp := b.firstInput
+ if len(root.Args) > 1 {
+ return nil, fmt.Errorf("xpath: %s function must have at most one parameter", root.FuncName)
+ }
+ if len(root.Args) == 1 {
+ argQuery, err := b.processNode(root.Args[0])
+ if err != nil {
+ return nil, err
+ }
+ inp = argQuery
+ }
+ f := &functionQuery{Input: inp}
+ switch root.FuncName {
+ case "name":
+ f.Func = nameFunc
+ case "local-name":
+ f.Func = localNameFunc
+ case "namespace-uri":
+ f.Func = namespaceFunc
+ }
+ qyOutput = f
+ case "true", "false":
+ val := root.FuncName == "true"
+ qyOutput = &functionQuery{
+ Input: b.firstInput,
+ Func: func(_ query, _ iterator) interface{} {
+ return val
+ },
+ }
+ case "last":
+ qyOutput = &functionQuery{Input: b.firstInput, Func: lastFunc}
+ case "position":
+ qyOutput = &functionQuery{Input: b.firstInput, Func: positionFunc}
+ case "boolean", "number", "string":
+ inp := b.firstInput
+ if len(root.Args) > 1 {
+ return nil, fmt.Errorf("xpath: %s function must have at most one parameter", root.FuncName)
+ }
+ if len(root.Args) == 1 {
+ argQuery, err := b.processNode(root.Args[0])
+ if err != nil {
+ return nil, err
+ }
+ inp = argQuery
+ }
+ f := &functionQuery{Input: inp}
+ switch root.FuncName {
+ case "boolean":
+ f.Func = booleanFunc
+ case "string":
+ f.Func = stringFunc
+ case "number":
+ f.Func = numberFunc
+ }
+ qyOutput = f
+ case "count":
+ //if b.firstInput == nil {
+ // return nil, errors.New("xpath: expression must evaluate to node-set")
+ //}
+ if len(root.Args) == 0 {
+ return nil, fmt.Errorf("xpath: count(node-sets) function must with have parameters node-sets")
+ }
+ argQuery, err := b.processNode(root.Args[0])
+ if err != nil {
+ return nil, err
+ }
+ qyOutput = &functionQuery{Input: argQuery, Func: countFunc}
+ case "sum":
+ if len(root.Args) == 0 {
+ return nil, fmt.Errorf("xpath: sum(node-sets) function must with have parameters node-sets")
+ }
+ argQuery, err := b.processNode(root.Args[0])
+ if err != nil {
+ return nil, err
+ }
+ qyOutput = &functionQuery{Input: argQuery, Func: sumFunc}
+ case "ceiling", "floor", "round":
+ if len(root.Args) == 0 {
+ return nil, fmt.Errorf("xpath: ceiling(node-sets) function must with have parameters node-sets")
+ }
+ argQuery, err := b.processNode(root.Args[0])
+ if err != nil {
+ return nil, err
+ }
+ f := &functionQuery{Input: argQuery}
+ switch root.FuncName {
+ case "ceiling":
+ f.Func = ceilingFunc
+ case "floor":
+ f.Func = floorFunc
+ case "round":
+ f.Func = roundFunc
+ }
+ qyOutput = f
+ case "concat":
+ if len(root.Args) < 2 {
+ return nil, fmt.Errorf("xpath: concat() must have at least two arguments")
+ }
+ var args []query
+ for _, v := range root.Args {
+ q, err := b.processNode(v)
+ if err != nil {
+ return nil, err
+ }
+ args = append(args, q)
+ }
+ qyOutput = &functionQuery{Input: b.firstInput, Func: concatFunc(args...)}
+ default:
+ return nil, fmt.Errorf("not yet support this function %s()", root.FuncName)
+ }
+ return qyOutput, nil
+}
+
+func (b *builder) processOperatorNode(root *operatorNode) (query, error) {
+ left, err := b.processNode(root.Left)
+ if err != nil {
+ return nil, err
+ }
+ right, err := b.processNode(root.Right)
+ if err != nil {
+ return nil, err
+ }
+ var qyOutput query
+ switch root.Op {
+ case "+", "-", "div", "mod": // Numeric operator
+ var exprFunc func(interface{}, interface{}) interface{}
+ switch root.Op {
+ case "+":
+ exprFunc = plusFunc
+ case "-":
+ exprFunc = minusFunc
+ case "div":
+ exprFunc = divFunc
+ case "mod":
+ exprFunc = modFunc
+ }
+ qyOutput = &numericQuery{Left: left, Right: right, Do: exprFunc}
+ case "=", ">", ">=", "<", "<=", "!=":
+ var exprFunc func(iterator, interface{}, interface{}) interface{}
+ switch root.Op {
+ case "=":
+ exprFunc = eqFunc
+ case ">":
+ exprFunc = gtFunc
+ case ">=":
+ exprFunc = geFunc
+ case "<":
+ exprFunc = ltFunc
+ case "<=":
+ exprFunc = leFunc
+ case "!=":
+ exprFunc = neFunc
+ }
+ qyOutput = &logicalQuery{Left: left, Right: right, Do: exprFunc}
+ case "or", "and":
+ isOr := false
+ if root.Op == "or" {
+ isOr = true
+ }
+ qyOutput = &booleanQuery{Left: left, Right: right, IsOr: isOr}
+ case "|":
+ qyOutput = &unionQuery{Left: left, Right: right}
+ }
+ return qyOutput, nil
+}
+
+func (b *builder) processNode(root node) (q query, err error) {
+ if b.depth = b.depth + 1; b.depth > 1024 {
+ err = errors.New("the xpath expressions is too complex")
+ return
+ }
+
+ switch root.Type() {
+ case nodeConstantOperand:
+ n := root.(*operandNode)
+ q = &constantQuery{Val: n.Val}
+ case nodeRoot:
+ q = &contextQuery{Root: true}
+ case nodeAxis:
+ q, err = b.processAxisNode(root.(*axisNode))
+ b.firstInput = q
+ case nodeFilter:
+ q, err = b.processFilterNode(root.(*filterNode))
+ case nodeFunction:
+ q, err = b.processFunctionNode(root.(*functionNode))
+ case nodeOperator:
+ q, err = b.processOperatorNode(root.(*operatorNode))
+ }
+ return
+}
+
+// build builds a specified XPath expressions expr.
+func build(expr string) (q query, err error) {
+ defer func() {
+ if e := recover(); e != nil {
+ switch x := e.(type) {
+ case string:
+ err = errors.New(x)
+ case error:
+ err = x
+ default:
+ err = errors.New("unknown panic")
+ }
+ }
+ }()
+ root := parse(expr)
+ b := &builder{}
+ return b.processNode(root)
+}
diff --git a/vendor/github.com/antchfx/xpath/doc_test.go b/vendor/github.com/antchfx/xpath/doc_test.go
new file mode 100644
index 0000000..2ef8e83
--- /dev/null
+++ b/vendor/github.com/antchfx/xpath/doc_test.go
@@ -0,0 +1,33 @@
+package xpath_test
+
+import (
+ "fmt"
+
+ "github.com/antchfx/xpath"
+)
+
+// XPath package example.
+func Example() {
+ expr, err := xpath.Compile("count(//book)")
+ if err != nil {
+ panic(err)
+ }
+ var root xpath.NodeNavigator
+ // using Evaluate() method
+ val := expr.Evaluate(root) // it returns float64 type
+ fmt.Println(val.(float64))
+
+ // using MustCompile() with Evaluate(); here Evaluate() returns a node iterator
+ expr = xpath.MustCompile("//book")
+ val = expr.Evaluate(root) // it returns NodeIterator type.
+ iter := val.(*xpath.NodeIterator)
+ for iter.MoveNext() {
+ fmt.Println(iter.Current().Value())
+ }
+
+ // using Select() method
+ iter = expr.Select(root) // it always returns NodeIterator object.
+ for iter.MoveNext() {
+ fmt.Println(iter.Current().Value())
+ }
+}
diff --git a/vendor/github.com/antchfx/xpath/func.go b/vendor/github.com/antchfx/xpath/func.go
new file mode 100644
index 0000000..e662bca
--- /dev/null
+++ b/vendor/github.com/antchfx/xpath/func.go
@@ -0,0 +1,475 @@
+package xpath
+
import (
	"errors"
	"fmt"
	"math"
	"strconv"
	"strings"
	"unicode/utf8"
)
+
+// The XPath function list.
+
+func predicate(q query) func(NodeNavigator) bool {
+ type Predicater interface {
+ Test(NodeNavigator) bool
+ }
+ if p, ok := q.(Predicater); ok {
+ return p.Test
+ }
+ return func(NodeNavigator) bool { return true }
+}
+
+// positionFunc is a XPath Node Set functions position().
+func positionFunc(q query, t iterator) interface{} {
+ var (
+ count = 1
+ node = t.Current()
+ )
+ test := predicate(q)
+ for node.MoveToPrevious() {
+ if test(node) {
+ count++
+ }
+ }
+ return float64(count)
+}
+
+// lastFunc is a XPath Node Set functions last().
+func lastFunc(q query, t iterator) interface{} {
+ var (
+ count = 0
+ node = t.Current()
+ )
+ node.MoveToFirst()
+ test := predicate(q)
+ for {
+ if test(node) {
+ count++
+ }
+ if !node.MoveToNext() {
+ break
+ }
+ }
+ return float64(count)
+}
+
+// countFunc is a XPath Node Set functions count(node-set).
+func countFunc(q query, t iterator) interface{} {
+ var count = 0
+ test := predicate(q)
+ switch typ := q.Evaluate(t).(type) {
+ case query:
+ for node := typ.Select(t); node != nil; node = typ.Select(t) {
+ if test(node) {
+ count++
+ }
+ }
+ }
+ return float64(count)
+}
+
+// sumFunc is a XPath Node Set functions sum(node-set).
+func sumFunc(q query, t iterator) interface{} {
+ var sum float64
+ switch typ := q.Evaluate(t).(type) {
+ case query:
+ for node := typ.Select(t); node != nil; node = typ.Select(t) {
+ if v, err := strconv.ParseFloat(node.Value(), 64); err == nil {
+ sum += v
+ }
+ }
+ case float64:
+ sum = typ
+ case string:
+ v, err := strconv.ParseFloat(typ, 64)
+ if err != nil {
+ panic(errors.New("sum() function argument type must be a node-set or number"))
+ }
+ sum = v
+ }
+ return sum
+}
+
+func asNumber(t iterator, o interface{}) float64 {
+ switch typ := o.(type) {
+ case query:
+ node := typ.Select(t)
+ if node == nil {
+ return float64(0)
+ }
+ if v, err := strconv.ParseFloat(node.Value(), 64); err == nil {
+ return v
+ }
+ case float64:
+ return typ
+ case string:
+ v, err := strconv.ParseFloat(typ, 64)
+ if err != nil {
+ panic(errors.New("ceiling() function argument type must be a node-set or number"))
+ }
+ return v
+ }
+ return 0
+}
+
// ceilingFunc is a XPath Node Set functions ceiling(node-set).
func ceilingFunc(q query, t iterator) interface{} {
	val := asNumber(t, q.Evaluate(t))
	return math.Ceil(val)
}

// floorFunc is a XPath Node Set functions floor(node-set).
func floorFunc(q query, t iterator) interface{} {
	val := asNumber(t, q.Evaluate(t))
	return math.Floor(val)
}

// roundFunc is a XPath Node Set functions round(node-set).
// The local round helper stands in for math.Round, which only exists
// on Go 1.10+ (see func_go110.go / func_pre_go110.go).
func roundFunc(q query, t iterator) interface{} {
	//return math.Round(val)
	val := asNumber(t, q.Evaluate(t))
	return round(val)
}
+
+// nameFunc is a XPath functions name([node-set]).
+func nameFunc(q query, t iterator) interface{} {
+ v := q.Select(t)
+ if v == nil {
+ return ""
+ }
+ ns := v.Prefix()
+ if ns == "" {
+ return v.LocalName()
+ }
+ return ns + ":" + v.LocalName()
+}
+
+// localNameFunc is a XPath functions local-name([node-set]).
+func localNameFunc(q query, t iterator) interface{} {
+ v := q.Select(t)
+ if v == nil {
+ return ""
+ }
+ return v.LocalName()
+}
+
// namespaceFunc is a XPath functions namespace-uri([node-set]).
//
// NOTE(review): this returns the node's prefix, not a namespace URI —
// the NodeNavigator used here exposes no URI accessor. Confirm this
// approximation is intended before relying on spec-conformant
// namespace-uri() results.
func namespaceFunc(q query, t iterator) interface{} {
	v := q.Select(t)
	if v == nil {
		return ""
	}
	return v.Prefix()
}
+
+func asBool(t iterator, v interface{}) bool {
+ switch v := v.(type) {
+ case nil:
+ return false
+ case *NodeIterator:
+ return v.MoveNext()
+ case bool:
+ return bool(v)
+ case float64:
+ return v != 0
+ case string:
+ return v != ""
+ case query:
+ return v.Select(t) != nil
+ default:
+ panic(fmt.Errorf("unexpected type: %T", v))
+ }
+}
+
+func asString(t iterator, v interface{}) string {
+ switch v := v.(type) {
+ case nil:
+ return ""
+ case bool:
+ if v {
+ return "true"
+ }
+ return "false"
+ case float64:
+ return strconv.FormatFloat(v, 'g', -1, 64)
+ case string:
+ return v
+ case query:
+ node := v.Select(t)
+ if node == nil {
+ return ""
+ }
+ return node.Value()
+ default:
+ panic(fmt.Errorf("unexpected type: %T", v))
+ }
+}
+
// booleanFunc is a XPath functions boolean([node-set]).
// Thin wrapper: evaluate the argument, then apply asBool's conversion.
func booleanFunc(q query, t iterator) interface{} {
	v := q.Evaluate(t)
	return asBool(t, v)
}

// numberFunc is a XPath functions number([node-set]).
// Thin wrapper: evaluate the argument, then apply asNumber's conversion.
func numberFunc(q query, t iterator) interface{} {
	v := q.Evaluate(t)
	return asNumber(t, v)
}

// stringFunc is a XPath functions string([node-set]).
// Thin wrapper: evaluate the argument, then apply asString's conversion.
func stringFunc(q query, t iterator) interface{} {
	v := q.Evaluate(t)
	return asString(t, v)
}
+
+// startwithFunc is a XPath functions starts-with(string, string).
+func startwithFunc(arg1, arg2 query) func(query, iterator) interface{} {
+ return func(q query, t iterator) interface{} {
+ var (
+ m, n string
+ ok bool
+ )
+ switch typ := arg1.Evaluate(t).(type) {
+ case string:
+ m = typ
+ case query:
+ node := typ.Select(t)
+ if node == nil {
+ return false
+ }
+ m = node.Value()
+ default:
+ panic(errors.New("starts-with() function argument type must be string"))
+ }
+ n, ok = arg2.Evaluate(t).(string)
+ if !ok {
+ panic(errors.New("starts-with() function argument type must be string"))
+ }
+ return strings.HasPrefix(m, n)
+ }
+}
+
+// endwithFunc is a XPath functions ends-with(string, string).
+func endwithFunc(arg1, arg2 query) func(query, iterator) interface{} {
+ return func(q query, t iterator) interface{} {
+ var (
+ m, n string
+ ok bool
+ )
+ switch typ := arg1.Evaluate(t).(type) {
+ case string:
+ m = typ
+ case query:
+ node := typ.Select(t)
+ if node == nil {
+ return false
+ }
+ m = node.Value()
+ default:
+ panic(errors.New("ends-with() function argument type must be string"))
+ }
+ n, ok = arg2.Evaluate(t).(string)
+ if !ok {
+ panic(errors.New("ends-with() function argument type must be string"))
+ }
+ return strings.HasSuffix(m, n)
+ }
+}
+
+// containsFunc is a XPath functions contains(string or @attr, string).
+func containsFunc(arg1, arg2 query) func(query, iterator) interface{} {
+ return func(q query, t iterator) interface{} {
+ var (
+ m, n string
+ ok bool
+ )
+
+ switch typ := arg1.Evaluate(t).(type) {
+ case string:
+ m = typ
+ case query:
+ node := typ.Select(t)
+ if node == nil {
+ return false
+ }
+ m = node.Value()
+ default:
+ panic(errors.New("contains() function argument type must be string"))
+ }
+
+ n, ok = arg2.Evaluate(t).(string)
+ if !ok {
+ panic(errors.New("contains() function argument type must be string"))
+ }
+
+ return strings.Contains(m, n)
+ }
+}
+
+// normalizespaceFunc is XPath functions normalize-space(string?)
+func normalizespaceFunc(q query, t iterator) interface{} {
+ var m string
+ switch typ := q.Evaluate(t).(type) {
+ case string:
+ m = typ
+ case query:
+ node := typ.Select(t)
+ if node == nil {
+ return false
+ }
+ m = node.Value()
+ }
+ return strings.TrimSpace(m)
+}
+
+// substringFunc is XPath functions substring function returns a part of a given string.
+func substringFunc(arg1, arg2, arg3 query) func(query, iterator) interface{} {
+ return func(q query, t iterator) interface{} {
+ var m string
+ switch typ := arg1.Evaluate(t).(type) {
+ case string:
+ m = typ
+ case query:
+ node := typ.Select(t)
+ if node == nil {
+ return ""
+ }
+ m = node.Value()
+ }
+
+ var start, length float64
+ var ok bool
+
+ if start, ok = arg2.Evaluate(t).(float64); !ok {
+ panic(errors.New("substring() function first argument type must be int"))
+ } else if start < 1 {
+ panic(errors.New("substring() function first argument type must be >= 1"))
+ }
+ start--
+ if arg3 != nil {
+ if length, ok = arg3.Evaluate(t).(float64); !ok {
+ panic(errors.New("substring() function second argument type must be int"))
+ }
+ }
+ if (len(m) - int(start)) < int(length) {
+ panic(errors.New("substring() function start and length argument out of range"))
+ }
+ if length > 0 {
+ return m[int(start):int(length+start)]
+ }
+ return m[int(start):]
+ }
+}
+
+// substringIndFunc is XPath functions substring-before/substring-after function returns a part of a given string.
+func substringIndFunc(arg1, arg2 query, after bool) func(query, iterator) interface{} {
+ return func(q query, t iterator) interface{} {
+ var str string
+ switch v := arg1.Evaluate(t).(type) {
+ case string:
+ str = v
+ case query:
+ node := v.Select(t)
+ if node == nil {
+ return ""
+ }
+ str = node.Value()
+ }
+ var word string
+ switch v := arg2.Evaluate(t).(type) {
+ case string:
+ word = v
+ case query:
+ node := v.Select(t)
+ if node == nil {
+ return ""
+ }
+ word = node.Value()
+ }
+ if word == "" {
+ return ""
+ }
+
+ i := strings.Index(str, word)
+ if i < 0 {
+ return ""
+ }
+ if after {
+ return str[i+len(word):]
+ }
+ return str[:i]
+ }
+}
+
+// stringLengthFunc is XPATH string-length( [string] ) function that returns a number
+// equal to the number of characters in a given string.
+func stringLengthFunc(arg1 query) func(query, iterator) interface{} {
+ return func(q query, t iterator) interface{} {
+ switch v := arg1.Evaluate(t).(type) {
+ case string:
+ return float64(len(v))
+ case query:
+ node := v.Select(t)
+ if node == nil {
+ break
+ }
+ return float64(len(node.Value()))
+ }
+ return float64(0)
+ }
+}
+
+// translateFunc is XPath functions translate() function returns a replaced string.
+func translateFunc(arg1, arg2, arg3 query) func(query, iterator) interface{} {
+ return func(q query, t iterator) interface{} {
+ str := asString(t, arg1.Evaluate(t))
+ src := asString(t, arg2.Evaluate(t))
+ dst := asString(t, arg3.Evaluate(t))
+
+ var replace []string
+ for i, s := range src {
+ d := ""
+ if i < len(dst) {
+ d = string(dst[i])
+ }
+ replace = append(replace, string(s), d)
+ }
+ return strings.NewReplacer(replace...).Replace(str)
+ }
+}
+
+// notFunc is XPATH functions not(expression) function operation.
+func notFunc(q query, t iterator) interface{} {
+ switch v := q.Evaluate(t).(type) {
+ case bool:
+ return !v
+ case query:
+ node := v.Select(t)
+ return node == nil
+ default:
+ return false
+ }
+}
+
+// concatFunc is the concat function concatenates two or more
+// strings and returns the resulting string.
+// concat( string1 , string2 [, stringn]* )
+func concatFunc(args ...query) func(query, iterator) interface{} {
+ return func(q query, t iterator) interface{} {
+ var a []string
+ for _, v := range args {
+ switch v := v.Evaluate(t).(type) {
+ case string:
+ a = append(a, v)
+ case query:
+ node := v.Select(t)
+ if node != nil {
+ a = append(a, node.Value())
+ }
+ }
+ }
+ return strings.Join(a, "")
+ }
+}
diff --git a/vendor/github.com/antchfx/xpath/func_go110.go b/vendor/github.com/antchfx/xpath/func_go110.go
new file mode 100644
index 0000000..500880f
--- /dev/null
+++ b/vendor/github.com/antchfx/xpath/func_go110.go
@@ -0,0 +1,9 @@
+// +build go1.10
+
+package xpath
+
+import "math"
+
// round converts f to the nearest integer, delegating to math.Round
// (available since Go 1.10), which rounds halves away from zero.
func round(f float64) int {
	r := math.Round(f)
	return int(r)
}
diff --git a/vendor/github.com/antchfx/xpath/func_pre_go110.go b/vendor/github.com/antchfx/xpath/func_pre_go110.go
new file mode 100644
index 0000000..043616b
--- /dev/null
+++ b/vendor/github.com/antchfx/xpath/func_pre_go110.go
@@ -0,0 +1,15 @@
+// +build !go1.10
+
+package xpath
+
+import "math"
+
// math.Round() is supported by Go 1.10+,
// This method just compatible for version <1.10.
// https://github.com/golang/go/issues/20100
//
// Halves round away from zero, matching math.Round: the value is
// shifted half a unit toward its own sign and truncated, except that
// |f| < 0.5 collapses straight to 0.
func round(f float64) int {
	shifted := f + math.Copysign(0.5, f)
	if math.Abs(f) < 0.5 {
		shifted = 0
	}
	return int(shifted)
}
diff --git a/vendor/github.com/antchfx/xpath/operator.go b/vendor/github.com/antchfx/xpath/operator.go
new file mode 100644
index 0000000..308d3cb
--- /dev/null
+++ b/vendor/github.com/antchfx/xpath/operator.go
@@ -0,0 +1,295 @@
+package xpath
+
+import (
+ "fmt"
+ "reflect"
+ "strconv"
+)
+
+// The XPath number operator function list.
+
// valueType is a return value type.
type valueType int

const (
	booleanType valueType = iota // Go bool
	numberType                   // Go float64
	stringType                   // Go string
	nodeSetType                  // node-set expression (query)
)
+
+func getValueType(i interface{}) valueType {
+ v := reflect.ValueOf(i)
+ switch v.Kind() {
+ case reflect.Float64:
+ return numberType
+ case reflect.String:
+ return stringType
+ case reflect.Bool:
+ return booleanType
+ default:
+ if _, ok := i.(query); ok {
+ return nodeSetType
+ }
+ }
+ panic(fmt.Errorf("xpath unknown value type: %v", v.Kind()))
+}
+
// logical compares two evaluated operands under the given operator.
type logical func(iterator, string, interface{}, interface{}) bool

// logicalFuncs dispatches a comparison on [leftType][rightType], with
// rows/columns in valueType order: boolean, number, string, node-set.
// nil entries are type pairings with no defined comparison; invoking
// one panics with a nil-function call.
var logicalFuncs = [][]logical{
	{cmpBooleanBoolean, nil, nil, nil},
	{nil, cmpNumericNumeric, cmpNumericString, cmpNumericNodeSet},
	{nil, cmpStringNumeric, cmpStringString, cmpStringNodeSet},
	{nil, cmpNodeSetNumeric, cmpNodeSetString, cmpNodeSetNodeSet},
}
+
// cmpNumberNumberF applies relational operator op to two float64
// operands; an unrecognized operator yields false.
func cmpNumberNumberF(op string, a, b float64) bool {
	if op == "=" {
		return a == b
	}
	if op == "!=" {
		return a != b
	}
	if op == "<" {
		return a < b
	}
	if op == "<=" {
		return a <= b
	}
	if op == ">" {
		return a > b
	}
	if op == ">=" {
		return a >= b
	}
	return false
}

// cmpStringStringF applies relational operator op to two strings using
// Go's lexicographic byte ordering; an unrecognized operator yields
// false.
func cmpStringStringF(op string, a, b string) bool {
	if op == "=" {
		return a == b
	}
	if op == "!=" {
		return a != b
	}
	if op == "<" {
		return a < b
	}
	if op == "<=" {
		return a <= b
	}
	if op == ">" {
		return a > b
	}
	if op == ">=" {
		return a >= b
	}
	return false
}

// cmpBooleanBooleanF applies logical operator "or" or "and"; anything
// else yields false.
func cmpBooleanBooleanF(op string, a, b bool) bool {
	if op == "or" {
		return a || b
	}
	if op == "and" {
		return a && b
	}
	return false
}
+
+func cmpNumericNumeric(t iterator, op string, m, n interface{}) bool {
+ a := m.(float64)
+ b := n.(float64)
+ return cmpNumberNumberF(op, a, b)
+}
+
+func cmpNumericString(t iterator, op string, m, n interface{}) bool {
+ a := m.(float64)
+ b := n.(string)
+ num, err := strconv.ParseFloat(b, 64)
+ if err != nil {
+ panic(err)
+ }
+ return cmpNumberNumberF(op, a, num)
+}
+
+func cmpNumericNodeSet(t iterator, op string, m, n interface{}) bool {
+ a := m.(float64)
+ b := n.(query)
+
+ for {
+ node := b.Select(t)
+ if node == nil {
+ break
+ }
+ num, err := strconv.ParseFloat(node.Value(), 64)
+ if err != nil {
+ panic(err)
+ }
+ if cmpNumberNumberF(op, a, num) {
+ return true
+ }
+ }
+ return false
+}
+
+func cmpNodeSetNumeric(t iterator, op string, m, n interface{}) bool {
+ a := m.(query)
+ b := n.(float64)
+ for {
+ node := a.Select(t)
+ if node == nil {
+ break
+ }
+ num, err := strconv.ParseFloat(node.Value(), 64)
+ if err != nil {
+ panic(err)
+ }
+ if cmpNumberNumberF(op, num, b) {
+ return true
+ }
+ }
+ return false
+}
+
+func cmpNodeSetString(t iterator, op string, m, n interface{}) bool {
+ a := m.(query)
+ b := n.(string)
+ for {
+ node := a.Select(t)
+ if node == nil {
+ break
+ }
+ if cmpStringStringF(op, b, node.Value()) {
+ return true
+ }
+ }
+ return false
+}
+
// cmpNodeSetNodeSet compares node-set vs node-set.
//
// NOTE(review): not implemented — always false. Per XPath 1.0 a
// node-set/node-set comparison is true when any pair of nodes
// satisfies it; confirm whether this stub is intentional.
func cmpNodeSetNodeSet(t iterator, op string, m, n interface{}) bool {
	return false
}
+
+func cmpStringNumeric(t iterator, op string, m, n interface{}) bool {
+ a := m.(string)
+ b := n.(float64)
+ num, err := strconv.ParseFloat(a, 64)
+ if err != nil {
+ panic(err)
+ }
+ return cmpNumberNumberF(op, b, num)
+}
+
+func cmpStringString(t iterator, op string, m, n interface{}) bool {
+ a := m.(string)
+ b := n.(string)
+ return cmpStringStringF(op, a, b)
+}
+
+func cmpStringNodeSet(t iterator, op string, m, n interface{}) bool {
+ a := m.(string)
+ b := n.(query)
+ for {
+ node := b.Select(t)
+ if node == nil {
+ break
+ }
+ if cmpStringStringF(op, a, node.Value()) {
+ return true
+ }
+ }
+ return false
+}
+
+func cmpBooleanBoolean(t iterator, op string, m, n interface{}) bool {
+ a := m.(bool)
+ b := n.(bool)
+ return cmpBooleanBooleanF(op, a, b)
+}
+
// eqFunc is an `=` operator.
// Like the other operator wrappers below, it classifies both operands
// and dispatches through logicalFuncs; an unsupported type pairing
// hits a nil table entry and panics.
func eqFunc(t iterator, m, n interface{}) interface{} {
	t1 := getValueType(m)
	t2 := getValueType(n)
	return logicalFuncs[t1][t2](t, "=", m, n)
}

// gtFunc is an `>` operator.
func gtFunc(t iterator, m, n interface{}) interface{} {
	t1 := getValueType(m)
	t2 := getValueType(n)
	return logicalFuncs[t1][t2](t, ">", m, n)
}

// geFunc is an `>=` operator.
func geFunc(t iterator, m, n interface{}) interface{} {
	t1 := getValueType(m)
	t2 := getValueType(n)
	return logicalFuncs[t1][t2](t, ">=", m, n)
}

// ltFunc is an `<` operator.
func ltFunc(t iterator, m, n interface{}) interface{} {
	t1 := getValueType(m)
	t2 := getValueType(n)
	return logicalFuncs[t1][t2](t, "<", m, n)
}

// leFunc is an `<=` operator.
func leFunc(t iterator, m, n interface{}) interface{} {
	t1 := getValueType(m)
	t2 := getValueType(n)
	return logicalFuncs[t1][t2](t, "<=", m, n)
}

// neFunc is an `!=` operator.
func neFunc(t iterator, m, n interface{}) interface{} {
	t1 := getValueType(m)
	t2 := getValueType(n)
	return logicalFuncs[t1][t2](t, "!=", m, n)
}

// orFunc is an `or` operator.
var orFunc = func(t iterator, m, n interface{}) interface{} {
	t1 := getValueType(m)
	t2 := getValueType(n)
	return logicalFuncs[t1][t2](t, "or", m, n)
}
+
// numericExpr coerces both operands to float64 via reflection and
// applies cb to them.
func numericExpr(m, n interface{}, cb func(float64, float64) float64) float64 {
	f64 := reflect.TypeOf(float64(0))
	lhs := reflect.ValueOf(m).Convert(f64).Float()
	rhs := reflect.ValueOf(n).Convert(f64).Float()
	return cb(lhs, rhs)
}
+
// plusFunc is an `+` operator.
var plusFunc = func(m, n interface{}) interface{} {
	return numericExpr(m, n, func(a, b float64) float64 {
		return a + b
	})
}

// minusFunc is an `-` operator.
var minusFunc = func(m, n interface{}) interface{} {
	return numericExpr(m, n, func(a, b float64) float64 {
		return a - b
	})
}

// mulFunc is an `*` operator.
var mulFunc = func(m, n interface{}) interface{} {
	return numericExpr(m, n, func(a, b float64) float64 {
		return a * b
	})
}

// divFunc is an `DIV` operator.
// Float division: x div 0 follows IEEE 754 (±Inf, or NaN for 0/0).
var divFunc = func(m, n interface{}) interface{} {
	return numericExpr(m, n, func(a, b float64) float64 {
		return a / b
	})
}

// modFunc is an 'MOD' operator.
// Both operands are truncated to int before %; a divisor that
// truncates to 0 panics with a runtime divide-by-zero.
var modFunc = func(m, n interface{}) interface{} {
	return numericExpr(m, n, func(a, b float64) float64 {
		return float64(int(a) % int(b))
	})
}
diff --git a/vendor/github.com/antchfx/xpath/parse.go b/vendor/github.com/antchfx/xpath/parse.go
new file mode 100644
index 0000000..6103131
--- /dev/null
+++ b/vendor/github.com/antchfx/xpath/parse.go
@@ -0,0 +1,1164 @@
+package xpath
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "strconv"
+ "unicode"
+)
+
// A XPath expression token type.
type itemType int

const (
	itemComma      itemType = iota // ','
	itemSlash                      // '/'
	itemAt                         // '@'
	itemDot                        // '.'
	itemLParens                    // '('
	itemRParens                    // ')'
	itemLBracket                   // '['
	itemRBracket                   // ']'
	itemStar                       // '*'
	itemPlus                       // '+'
	itemMinus                      // '-'
	itemEq                         // '='
	itemLt                         // '<'
	itemGt                         // '>'
	itemBang                       // '!'
	itemDollar                     // '$'
	itemApos                       // '\''
	itemQuote                      // '"'
	itemUnion                      // '|'
	itemNe                         // '!='
	itemLe                         // '<='
	itemGe                         // '>='
	itemAnd                        // '&&'
	itemOr                         // '||'
	itemDotDot                     // '..'
	itemSlashSlash                 // '//'
	itemName                       // XML Name
	itemString                     // Quoted string constant
	itemNumber                     // Number constant
	itemAxe                        // Axe (like child::)
	itemEof                        // END
)

// A node is an XPath node in the parse tree.
type node interface {
	Type() nodeType
}

// nodeType identifies the type of a parse tree node.
// Embedding nodeType into a concrete node struct supplies its Type()
// method.
type nodeType int

func (t nodeType) Type() nodeType {
	return t
}

const (
	nodeRoot nodeType = iota
	nodeAxis
	nodeFilter
	nodeFunction
	nodeOperator
	nodeVariable
	nodeConstantOperand
)

// parser builds a parse tree from the scanner's token stream.
type parser struct {
	r *scanner // token source
	d int      // current expression nesting depth (recursion guard)
}

// newOperatorNode returns new operator node OperatorNode.
func newOperatorNode(op string, left, right node) node {
	return &operatorNode{nodeType: nodeOperator, Op: op, Left: left, Right: right}
}

// newOperand returns new constant operand node OperandNode.
func newOperandNode(v interface{}) node {
	return &operandNode{nodeType: nodeConstantOperand, Val: v}
}

// newAxisNode returns new axis node AxisNode.
func newAxisNode(axeTyp, localName, prefix, prop string, n node) node {
	return &axisNode{
		nodeType:  nodeAxis,
		LocalName: localName,
		Prefix:    prefix,
		AxeType:   axeTyp,
		Prop:      prop,
		Input:     n,
	}
}

// newVariableNode returns new variable node VariableNode.
func newVariableNode(prefix, name string) node {
	return &variableNode{nodeType: nodeVariable, Name: name, Prefix: prefix}
}

// newFilterNode returns a new filter node FilterNode.
func newFilterNode(n, m node) node {
	return &filterNode{nodeType: nodeFilter, Input: n, Condition: m}
}

// newRootNode returns a root node.
func newRootNode(s string) node {
	return &rootNode{nodeType: nodeRoot, slash: s}
}

// newFunctionNode returns function call node.
func newFunctionNode(name, prefix string, args []node) node {
	return &functionNode{nodeType: nodeFunction, Prefix: prefix, FuncName: name, Args: args}
}

// testOp reports whether the current item is the named (unprefixed)
// operator keyword op, e.g. "or", "and", "div", "mod".
func testOp(r *scanner, op string) bool {
	return r.typ == itemName && r.prefix == "" && r.name == op
}

// isPrimaryExpr reports whether the current token can begin a
// PrimaryExpr (literal, number, variable, parenthesized expression, or
// a function call that is not a node-type test).
func isPrimaryExpr(r *scanner) bool {
	switch r.typ {
	case itemString, itemNumber, itemDollar, itemLParens:
		return true
	case itemName:
		return r.canBeFunc && !isNodeType(r)
	}
	return false
}

// isNodeType reports whether the current name token is one of the
// node-type tests: node(), text(), processing-instruction(), comment().
func isNodeType(r *scanner) bool {
	switch r.name {
	case "node", "text", "processing-instruction", "comment":
		return r.prefix == ""
	}
	return false
}

// isStep reports whether item can begin a location step.
func isStep(item itemType) bool {
	switch item {
	case itemDot, itemDotDot, itemAt, itemAxe, itemStar, itemName:
		return true
	}
	return false
}

// checkItem panics unless the current token has type typ.
func checkItem(r *scanner, typ itemType) {
	if r.typ != typ {
		panic(fmt.Sprintf("%s has an invalid token", r.text))
	}
}
+
+// parseExpression parsing the expression with input node n.
+func (p *parser) parseExpression(n node) node {
+ if p.d = p.d + 1; p.d > 200 {
+ panic("the xpath query is too complex(depth > 200)")
+ }
+ n = p.parseOrExpr(n)
+ p.d--
+ return n
+}
+
// next scanning next item on forward.
func (p *parser) next() bool {
	return p.r.nextItem()
}

// skipItem asserts the current token has type typ (panicking
// otherwise) and advances to the next token.
func (p *parser) skipItem(typ itemType) {
	checkItem(p.r, typ)
	p.next()
}
+
+// OrExpr ::= AndExpr | OrExpr 'or' AndExpr
+func (p *parser) parseOrExpr(n node) node {
+ opnd := p.parseAndExpr(n)
+ for {
+ if !testOp(p.r, "or") {
+ break
+ }
+ p.next()
+ opnd = newOperatorNode("or", opnd, p.parseAndExpr(n))
+ }
+ return opnd
+}
+
+// AndExpr ::= EqualityExpr | AndExpr 'and' EqualityExpr
+func (p *parser) parseAndExpr(n node) node {
+ opnd := p.parseEqualityExpr(n)
+ for {
+ if !testOp(p.r, "and") {
+ break
+ }
+ p.next()
+ opnd = newOperatorNode("and", opnd, p.parseEqualityExpr(n))
+ }
+ return opnd
+}
+
+// EqualityExpr ::= RelationalExpr | EqualityExpr '=' RelationalExpr | EqualityExpr '!=' RelationalExpr
+func (p *parser) parseEqualityExpr(n node) node {
+ opnd := p.parseRelationalExpr(n)
+Loop:
+ for {
+ var op string
+ switch p.r.typ {
+ case itemEq:
+ op = "="
+ case itemNe:
+ op = "!="
+ default:
+ break Loop
+ }
+ p.next()
+ opnd = newOperatorNode(op, opnd, p.parseRelationalExpr(n))
+ }
+ return opnd
+}
+
+// RelationalExpr ::= AdditiveExpr | RelationalExpr '<' AdditiveExpr | RelationalExpr '>' AdditiveExpr
+// | RelationalExpr '<=' AdditiveExpr
+// | RelationalExpr '>=' AdditiveExpr
+func (p *parser) parseRelationalExpr(n node) node {
+ opnd := p.parseAdditiveExpr(n)
+Loop:
+ for {
+ var op string
+ switch p.r.typ {
+ case itemLt:
+ op = "<"
+ case itemGt:
+ op = ">"
+ case itemLe:
+ op = "<="
+ case itemGe:
+ op = ">="
+ default:
+ break Loop
+ }
+ p.next()
+ opnd = newOperatorNode(op, opnd, p.parseAdditiveExpr(n))
+ }
+ return opnd
+}
+
+// AdditiveExpr ::= MultiplicativeExpr | AdditiveExpr '+' MultiplicativeExpr | AdditiveExpr '-' MultiplicativeExpr
+func (p *parser) parseAdditiveExpr(n node) node {
+ opnd := p.parseMultiplicativeExpr(n)
+Loop:
+ for {
+ var op string
+ switch p.r.typ {
+ case itemPlus:
+ op = "+"
+ case itemMinus:
+ op = "-"
+ default:
+ break Loop
+ }
+ p.next()
+ opnd = newOperatorNode(op, opnd, p.parseMultiplicativeExpr(n))
+ }
+ return opnd
+}
+
+// MultiplicativeExpr ::= UnaryExpr | MultiplicativeExpr MultiplyOperator(*) UnaryExpr
+// | MultiplicativeExpr 'div' UnaryExpr | MultiplicativeExpr 'mod' UnaryExpr
+func (p *parser) parseMultiplicativeExpr(n node) node {
+ opnd := p.parseUnaryExpr(n)
+Loop:
+ for {
+ var op string
+ if p.r.typ == itemStar {
+ op = "*"
+ } else if testOp(p.r, "div") || testOp(p.r, "mod") {
+ op = p.r.name
+ } else {
+ break Loop
+ }
+ p.next()
+ opnd = newOperatorNode(op, opnd, p.parseUnaryExpr(n))
+ }
+ return opnd
+}
+
+// UnaryExpr ::= UnionExpr | '-' UnaryExpr
+func (p *parser) parseUnaryExpr(n node) node {
+ minus := false
+ // ignore '-' sequence
+ for p.r.typ == itemMinus {
+ p.next()
+ minus = !minus
+ }
+ opnd := p.parseUnionExpr(n)
+ if minus {
+ opnd = newOperatorNode("*", opnd, newOperandNode(float64(-1)))
+ }
+ return opnd
+}
+
+// UnionExpr ::= PathExpr | UnionExpr '|' PathExpr
+func (p *parser) parseUnionExpr(n node) node {
+ opnd := p.parsePathExpr(n)
+Loop:
+ for {
+ if p.r.typ != itemUnion {
+ break Loop
+ }
+ p.next()
+ opnd2 := p.parsePathExpr(n)
+ // Checking the node type that must be is node set type?
+ opnd = newOperatorNode("|", opnd, opnd2)
+ }
+ return opnd
+}
+
+// PathExpr ::= LocationPath | FilterExpr | FilterExpr '/' RelativeLocationPath | FilterExpr '//' RelativeLocationPath
+func (p *parser) parsePathExpr(n node) node {
+ var opnd node
+ if isPrimaryExpr(p.r) {
+ opnd = p.parseFilterExpr(n)
+ switch p.r.typ {
+ case itemSlash:
+ p.next()
+ opnd = p.parseRelativeLocationPath(opnd)
+ case itemSlashSlash:
+ p.next()
+ opnd = p.parseRelativeLocationPath(newAxisNode("descendant-or-self", "", "", "", opnd))
+ }
+ } else {
+ opnd = p.parseLocationPath(nil)
+ }
+ return opnd
+}
+
// FilterExpr ::= PrimaryExpr | FilterExpr Predicate
//
// NOTE(review): the grammar allows Predicate* but only a single
// predicate is consumed here ('if', not 'for') — confirm whether
// chained predicates on a filter expression should be supported.
func (p *parser) parseFilterExpr(n node) node {
	opnd := p.parsePrimaryExpr(n)
	if p.r.typ == itemLBracket {
		opnd = newFilterNode(opnd, p.parsePredicate(opnd))
	}
	return opnd
}
+
+// Predicate ::= '[' PredicateExpr ']'
+func (p *parser) parsePredicate(n node) node {
+ p.skipItem(itemLBracket)
+ opnd := p.parseExpression(n)
+ p.skipItem(itemRBracket)
+ return opnd
+}
+
// LocationPath ::= RelativeLocationPath | AbsoluteLocationPath
func (p *parser) parseLocationPath(n node) (opnd node) {
	switch p.r.typ {
	case itemSlash:
		// absolute path rooted at '/'
		p.next()
		opnd = newRootNode("/")
		if isStep(p.r.typ) {
			opnd = p.parseRelativeLocationPath(opnd) // ?? child:: or self ??
		}
	case itemSlashSlash:
		// '//' abbreviates /descendant-or-self::node()/
		p.next()
		opnd = newRootNode("//")
		opnd = p.parseRelativeLocationPath(newAxisNode("descendant-or-self", "", "", "", opnd))
	default:
		opnd = p.parseRelativeLocationPath(n)
	}
	return opnd
}
+
+// RelativeLocationPath ::= Step | RelativeLocationPath '/' Step | AbbreviatedRelativeLocationPath
+func (p *parser) parseRelativeLocationPath(n node) node {
+ opnd := n
+Loop:
+ for {
+ opnd = p.parseStep(opnd)
+ switch p.r.typ {
+ case itemSlashSlash:
+ p.next()
+ opnd = newAxisNode("descendant-or-self", "", "", "", opnd)
+ case itemSlash:
+ p.next()
+ default:
+ break Loop
+ }
+ }
+ return opnd
+}
+
+// Step ::= AxisSpecifier NodeTest Predicate* | AbbreviatedStep
+func (p *parser) parseStep(n node) node {
+ axeTyp := "child" // default axes value.
+ if p.r.typ == itemDot || p.r.typ == itemDotDot {
+ if p.r.typ == itemDot {
+ axeTyp = "self"
+ } else {
+ axeTyp = "parent"
+ }
+ p.next()
+ return newAxisNode(axeTyp, "", "", "", n)
+ }
+ switch p.r.typ {
+ case itemAt:
+ p.next()
+ axeTyp = "attribute"
+ case itemAxe:
+ axeTyp = p.r.name
+ p.next()
+ }
+ opnd := p.parseNodeTest(n, axeTyp)
+ for p.r.typ == itemLBracket {
+ opnd = newFilterNode(opnd, p.parsePredicate(opnd))
+ }
+ return opnd
+}
+
// NodeTest ::= NameTest | nodeType '(' ')' | 'processing-instruction' '(' Literal ')'
func (p *parser) parseNodeTest(n node, axeTyp string) (opnd node) {
	switch p.r.typ {
	case itemName:
		if p.r.canBeFunc && isNodeType(p.r) {
			// node-type test: comment(), text(), processing-instruction(), node()
			var prop string
			switch p.r.name {
			case "comment", "text", "processing-instruction", "node":
				prop = p.r.name
			}
			var name string
			p.next()
			p.skipItem(itemLParens)
			// processing-instruction() may name a target literal
			if prop == "processing-instruction" && p.r.typ != itemRParens {
				checkItem(p.r, itemString)
				name = p.r.strval
				p.next()
			}
			p.skipItem(itemRParens)
			opnd = newAxisNode(axeTyp, name, "", prop, n)
		} else {
			// name test, possibly prefixed: name, ns:name or ns:*
			prefix := p.r.prefix
			name := p.r.name
			p.next()
			if p.r.name == "*" {
				name = ""
			}
			opnd = newAxisNode(axeTyp, name, prefix, "", n)
		}
	case itemStar:
		// '*' matches any element on the axis
		opnd = newAxisNode(axeTyp, "", "", "", n)
		p.next()
	default:
		panic("expression must evaluate to a node-set")
	}
	return opnd
}

// PrimaryExpr ::= VariableReference | '(' Expr ')' | Literal | Number | FunctionCall
func (p *parser) parsePrimaryExpr(n node) (opnd node) {
	switch p.r.typ {
	case itemString:
		opnd = newOperandNode(p.r.strval)
		p.next()
	case itemNumber:
		opnd = newOperandNode(p.r.numval)
		p.next()
	case itemDollar:
		// variable reference: $prefix:name
		p.next()
		checkItem(p.r, itemName)
		opnd = newVariableNode(p.r.prefix, p.r.name)
		p.next()
	case itemLParens:
		p.next()
		opnd = p.parseExpression(n)
		p.skipItem(itemRParens)
	case itemName:
		if p.r.canBeFunc && !isNodeType(p.r) {
			opnd = p.parseMethod(nil)
		}
		// NOTE(review): a plain name that is not a function call falls
		// through and returns a nil node — confirm callers guard this
		// via isPrimaryExpr so the state is unreachable.
	}
	return opnd
}
+
+// FunctionCall ::= FunctionName '(' ( Argument ( ',' Argument )* )? ')'
+func (p *parser) parseMethod(n node) node {
+ var args []node
+ name := p.r.name
+ prefix := p.r.prefix
+
+ p.skipItem(itemName)
+ p.skipItem(itemLParens)
+ if p.r.typ != itemRParens {
+ for {
+ args = append(args, p.parseExpression(n))
+ if p.r.typ == itemRParens {
+ break
+ }
+ p.skipItem(itemComma)
+ }
+ }
+ p.skipItem(itemRParens)
+ return newFunctionNode(name, prefix, args)
+}
+
+// Parse parsing the XPath express string expr and returns a tree node.
+func parse(expr string) node {
+ r := &scanner{text: expr}
+ r.nextChar()
+ r.nextItem()
+ p := &parser{r: r}
+ return p.parseExpression(nil)
+}
+
// rootNode holds a top-level node of tree.
type rootNode struct {
	nodeType
	slash string // "/" or "//", as written in the expression
}

func (r *rootNode) String() string {
	return r.slash
}

// operatorNode holds two Nodes operator.
type operatorNode struct {
	nodeType
	Op          string
	Left, Right node
}

func (o *operatorNode) String() string {
	return fmt.Sprintf("%v%s%v", o.Left, o.Op, o.Right)
}

// axisNode holds a location step.
type axisNode struct {
	nodeType
	Input     node
	Prop      string // node-test name.[comment|text|processing-instruction|node]
	AxeType   string // name of the axes.[attribute|ancestor|child|....]
	LocalName string // local part name of node.
	Prefix    string // prefix name of node.
}

func (a *axisNode) String() string {
	var b bytes.Buffer
	if a.AxeType != "" {
		b.Write([]byte(a.AxeType + "::"))
	}
	if a.Prefix != "" {
		b.Write([]byte(a.Prefix + ":"))
	}
	b.Write([]byte(a.LocalName))
	if a.Prop != "" {
		b.Write([]byte("/" + a.Prop + "()"))
	}
	return b.String()
}

// operandNode holds a constant operand.
type operandNode struct {
	nodeType
	Val interface{} // string or float64 constant
}

func (o *operandNode) String() string {
	return fmt.Sprintf("%v", o.Val)
}

// filterNode holds a condition filter.
type filterNode struct {
	nodeType
	Input, Condition node
}

func (f *filterNode) String() string {
	return fmt.Sprintf("%s[%s]", f.Input, f.Condition)
}

// variableNode holds a variable.
type variableNode struct {
	nodeType
	Name, Prefix string
}

func (v *variableNode) String() string {
	if v.Prefix == "" {
		return v.Name
	}
	return fmt.Sprintf("%s:%s", v.Prefix, v.Name)
}

// functionNode holds a function call.
type functionNode struct {
	nodeType
	Args     []node
	Prefix   string
	FuncName string // function name
}

func (f *functionNode) String() string {
	var b bytes.Buffer
	// fun(arg1, ..., argn)
	b.Write([]byte(f.FuncName))
	b.Write([]byte("("))
	for i, arg := range f.Args {
		if i > 0 {
			b.Write([]byte(","))
		}
		b.Write([]byte(fmt.Sprintf("%s", arg)))
	}
	b.Write([]byte(")"))
	return b.String()
}

// scanner tokenizes an XPath expression string.
type scanner struct {
	text, name, prefix string

	pos       int
	curr      rune
	typ       itemType
	strval    string  // text value at current pos
	numval    float64 // number value at current pos
	canBeFunc bool    // name token may start a function call (set during scanning)
}
+
+func (s *scanner) nextChar() bool {
+ if s.pos >= len(s.text) {
+ s.curr = rune(0)
+ return false
+ }
+ s.curr = rune(s.text[s.pos])
+ s.pos += 1
+ return true
+}
+
+// nextItem scans the next token, storing its kind in s.typ and its
+// value in s.name/s.prefix/s.strval/s.numval as appropriate.
+// It returns false once the end of the input is reached.
+func (s *scanner) nextItem() bool {
+	s.skipSpace()
+	switch s.curr {
+	case 0:
+		// rune(0) is the EOF sentinel set by nextChar.
+		s.typ = itemEof
+		return false
+	case ',', '@', '(', ')', '|', '*', '[', ']', '+', '-', '=', '#', '$':
+		// NOTE(review): asItemType has no case for '#', so a '#' here
+		// panics with "unknown item" — confirm whether '#' is intended
+		// to be a token at all.
+		s.typ = asItemType(s.curr)
+		s.nextChar()
+	case '<':
+		s.typ = itemLt
+		s.nextChar()
+		if s.curr == '=' {
+			s.typ = itemLe
+			s.nextChar()
+		}
+	case '>':
+		s.typ = itemGt
+		s.nextChar()
+		if s.curr == '=' {
+			s.typ = itemGe
+			s.nextChar()
+		}
+	case '!':
+		s.typ = itemBang
+		s.nextChar()
+		if s.curr == '=' {
+			s.typ = itemNe
+			s.nextChar()
+		}
+	case '.':
+		// ".", "..", or a fraction such as ".75".
+		s.typ = itemDot
+		s.nextChar()
+		if s.curr == '.' {
+			s.typ = itemDotDot
+			s.nextChar()
+		} else if isDigit(s.curr) {
+			s.typ = itemNumber
+			s.numval = s.scanFraction()
+		}
+	case '/':
+		s.typ = itemSlash
+		s.nextChar()
+		if s.curr == '/' {
+			s.typ = itemSlashSlash
+			s.nextChar()
+		}
+	case '"', '\'':
+		s.typ = itemString
+		s.strval = s.scanString()
+	default:
+		if isDigit(s.curr) {
+			s.typ = itemNumber
+			s.numval = s.scanNumber()
+		} else if isName(s.curr) {
+			s.typ = itemName
+			s.name = s.scanName()
+			s.prefix = ""
+			// "foo:bar" is one item, not three, because a qualified name
+			// allows no internal spaces. It must be distinguished from the
+			// axis form "foo::"; the spaced form "foo ::" is handled below.
+			if s.curr == ':' {
+				s.nextChar()
+				// can be "foo:bar" or "foo::"
+				if s.curr == ':' {
+					// "foo::"
+					s.nextChar()
+					s.typ = itemAxe
+				} else { // "foo:*", "foo:bar" or "foo: "
+					s.prefix = s.name
+					if s.curr == '*' {
+						s.nextChar()
+						s.name = "*"
+					} else if isName(s.curr) {
+						s.name = s.scanName()
+					} else {
+						panic(fmt.Sprintf("%s has an invalid qualified name.", s.text))
+					}
+				}
+			} else {
+				s.skipSpace()
+				if s.curr == ':' {
+					s.nextChar()
+					// it can be "foo ::" or just "foo :"
+					if s.curr == ':' {
+						s.nextChar()
+						s.typ = itemAxe
+					} else {
+						panic(fmt.Sprintf("%s has an invalid qualified name.", s.text))
+					}
+				}
+			}
+			s.skipSpace()
+			// A name immediately followed by '(' may be a function call.
+			s.canBeFunc = s.curr == '('
+		} else {
+			panic(fmt.Sprintf("%s has an invalid token.", s.text))
+		}
+	}
+	return true
+}
+
+// skipSpace advances past any Unicode whitespace.
+func (s *scanner) skipSpace() {
+Loop:
+	for {
+		if !unicode.IsSpace(s.curr) || !s.nextChar() {
+			break Loop
+		}
+	}
+}
+
+// scanFraction scans a number that started with '.', e.g. ".75".
+// On entry s.curr is the first digit after the dot and s.pos-2 is the
+// index of the dot itself, so the parsed slice includes the dot.
+func (s *scanner) scanFraction() float64 {
+	var (
+		i = s.pos - 2
+		c = 1 // counts the leading '.'
+	)
+	for isDigit(s.curr) {
+		s.nextChar()
+		c++
+	}
+	v, err := strconv.ParseFloat(s.text[i:i+c], 64)
+	if err != nil {
+		panic(fmt.Errorf("xpath: scanFraction parse float got error: %v", err))
+	}
+	return v
+}
+
+// scanNumber scans an integer or decimal literal starting at the
+// current character.
+func (s *scanner) scanNumber() float64 {
+	var (
+		c int
+		i = s.pos - 1
+	)
+	for isDigit(s.curr) {
+		s.nextChar()
+		c++
+	}
+	if s.curr == '.' {
+		s.nextChar()
+		c++
+		for isDigit(s.curr) {
+			s.nextChar()
+			c++
+		}
+	}
+	v, err := strconv.ParseFloat(s.text[i:i+c], 64)
+	if err != nil {
+		panic(fmt.Errorf("xpath: scanNumber parse float got error: %v", err))
+	}
+	return v
+}
+
+// scanString scans a quoted literal delimited by the current quote
+// character (single or double) and returns its contents without the
+// quotes. Panics if the string is unterminated.
+func (s *scanner) scanString() string {
+	var (
+		c   = 0
+		end = s.curr
+	)
+	s.nextChar()
+	i := s.pos - 1
+	for s.curr != end {
+		if !s.nextChar() {
+			panic(errors.New("xpath: scanString got unclosed string"))
+		}
+		c++
+	}
+	s.nextChar()
+	return s.text[i : i+c]
+}
+
+// scanName scans a run of name characters (see isName) starting at the
+// current character.
+func (s *scanner) scanName() string {
+	var (
+		c int
+		i = s.pos - 1
+	)
+	for isName(s.curr) {
+		c++
+		if !s.nextChar() {
+			break
+		}
+	}
+	return s.text[i : i+c]
+}
+
+// isName reports whether r may appear in a name token: any character in
+// the first/second tables or '*', but never ':' or '/'.
+func isName(r rune) bool {
+	return string(r) != ":" && string(r) != "/" &&
+		(unicode.Is(first, r) || unicode.Is(second, r) || string(r) == "*")
+}
+
+// isDigit reports whether r is a decimal digit.
+func isDigit(r rune) bool {
+	return unicode.IsDigit(r)
+}
+
+// asItemType maps a single-character token to its itemType.
+// NOTE(review): nextItem's punctuation case also accepts '#', which has
+// no mapping here and therefore panics — confirm intended.
+func asItemType(r rune) itemType {
+	switch r {
+	case ',':
+		return itemComma
+	case '@':
+		return itemAt
+	case '(':
+		return itemLParens
+	case ')':
+		return itemRParens
+	case '|':
+		return itemUnion
+	case '*':
+		return itemStar
+	case '[':
+		return itemLBracket
+	case ']':
+		return itemRBracket
+	case '+':
+		return itemPlus
+	case '-':
+		return itemMinus
+	case '=':
+		return itemEq
+	case '$':
+		return itemDollar
+	}
+	panic(fmt.Errorf("unknown item: %v", r))
+}
+
+var first = &unicode.RangeTable{
+ R16: []unicode.Range16{
+ {0x003A, 0x003A, 1},
+ {0x0041, 0x005A, 1},
+ {0x005F, 0x005F, 1},
+ {0x0061, 0x007A, 1},
+ {0x00C0, 0x00D6, 1},
+ {0x00D8, 0x00F6, 1},
+ {0x00F8, 0x00FF, 1},
+ {0x0100, 0x0131, 1},
+ {0x0134, 0x013E, 1},
+ {0x0141, 0x0148, 1},
+ {0x014A, 0x017E, 1},
+ {0x0180, 0x01C3, 1},
+ {0x01CD, 0x01F0, 1},
+ {0x01F4, 0x01F5, 1},
+ {0x01FA, 0x0217, 1},
+ {0x0250, 0x02A8, 1},
+ {0x02BB, 0x02C1, 1},
+ {0x0386, 0x0386, 1},
+ {0x0388, 0x038A, 1},
+ {0x038C, 0x038C, 1},
+ {0x038E, 0x03A1, 1},
+ {0x03A3, 0x03CE, 1},
+ {0x03D0, 0x03D6, 1},
+ {0x03DA, 0x03E0, 2},
+ {0x03E2, 0x03F3, 1},
+ {0x0401, 0x040C, 1},
+ {0x040E, 0x044F, 1},
+ {0x0451, 0x045C, 1},
+ {0x045E, 0x0481, 1},
+ {0x0490, 0x04C4, 1},
+ {0x04C7, 0x04C8, 1},
+ {0x04CB, 0x04CC, 1},
+ {0x04D0, 0x04EB, 1},
+ {0x04EE, 0x04F5, 1},
+ {0x04F8, 0x04F9, 1},
+ {0x0531, 0x0556, 1},
+ {0x0559, 0x0559, 1},
+ {0x0561, 0x0586, 1},
+ {0x05D0, 0x05EA, 1},
+ {0x05F0, 0x05F2, 1},
+ {0x0621, 0x063A, 1},
+ {0x0641, 0x064A, 1},
+ {0x0671, 0x06B7, 1},
+ {0x06BA, 0x06BE, 1},
+ {0x06C0, 0x06CE, 1},
+ {0x06D0, 0x06D3, 1},
+ {0x06D5, 0x06D5, 1},
+ {0x06E5, 0x06E6, 1},
+ {0x0905, 0x0939, 1},
+ {0x093D, 0x093D, 1},
+ {0x0958, 0x0961, 1},
+ {0x0985, 0x098C, 1},
+ {0x098F, 0x0990, 1},
+ {0x0993, 0x09A8, 1},
+ {0x09AA, 0x09B0, 1},
+ {0x09B2, 0x09B2, 1},
+ {0x09B6, 0x09B9, 1},
+ {0x09DC, 0x09DD, 1},
+ {0x09DF, 0x09E1, 1},
+ {0x09F0, 0x09F1, 1},
+ {0x0A05, 0x0A0A, 1},
+ {0x0A0F, 0x0A10, 1},
+ {0x0A13, 0x0A28, 1},
+ {0x0A2A, 0x0A30, 1},
+ {0x0A32, 0x0A33, 1},
+ {0x0A35, 0x0A36, 1},
+ {0x0A38, 0x0A39, 1},
+ {0x0A59, 0x0A5C, 1},
+ {0x0A5E, 0x0A5E, 1},
+ {0x0A72, 0x0A74, 1},
+ {0x0A85, 0x0A8B, 1},
+ {0x0A8D, 0x0A8D, 1},
+ {0x0A8F, 0x0A91, 1},
+ {0x0A93, 0x0AA8, 1},
+ {0x0AAA, 0x0AB0, 1},
+ {0x0AB2, 0x0AB3, 1},
+ {0x0AB5, 0x0AB9, 1},
+ {0x0ABD, 0x0AE0, 0x23},
+ {0x0B05, 0x0B0C, 1},
+ {0x0B0F, 0x0B10, 1},
+ {0x0B13, 0x0B28, 1},
+ {0x0B2A, 0x0B30, 1},
+ {0x0B32, 0x0B33, 1},
+ {0x0B36, 0x0B39, 1},
+ {0x0B3D, 0x0B3D, 1},
+ {0x0B5C, 0x0B5D, 1},
+ {0x0B5F, 0x0B61, 1},
+ {0x0B85, 0x0B8A, 1},
+ {0x0B8E, 0x0B90, 1},
+ {0x0B92, 0x0B95, 1},
+ {0x0B99, 0x0B9A, 1},
+ {0x0B9C, 0x0B9C, 1},
+ {0x0B9E, 0x0B9F, 1},
+ {0x0BA3, 0x0BA4, 1},
+ {0x0BA8, 0x0BAA, 1},
+ {0x0BAE, 0x0BB5, 1},
+ {0x0BB7, 0x0BB9, 1},
+ {0x0C05, 0x0C0C, 1},
+ {0x0C0E, 0x0C10, 1},
+ {0x0C12, 0x0C28, 1},
+ {0x0C2A, 0x0C33, 1},
+ {0x0C35, 0x0C39, 1},
+ {0x0C60, 0x0C61, 1},
+ {0x0C85, 0x0C8C, 1},
+ {0x0C8E, 0x0C90, 1},
+ {0x0C92, 0x0CA8, 1},
+ {0x0CAA, 0x0CB3, 1},
+ {0x0CB5, 0x0CB9, 1},
+ {0x0CDE, 0x0CDE, 1},
+ {0x0CE0, 0x0CE1, 1},
+ {0x0D05, 0x0D0C, 1},
+ {0x0D0E, 0x0D10, 1},
+ {0x0D12, 0x0D28, 1},
+ {0x0D2A, 0x0D39, 1},
+ {0x0D60, 0x0D61, 1},
+ {0x0E01, 0x0E2E, 1},
+ {0x0E30, 0x0E30, 1},
+ {0x0E32, 0x0E33, 1},
+ {0x0E40, 0x0E45, 1},
+ {0x0E81, 0x0E82, 1},
+ {0x0E84, 0x0E84, 1},
+ {0x0E87, 0x0E88, 1},
+ {0x0E8A, 0x0E8D, 3},
+ {0x0E94, 0x0E97, 1},
+ {0x0E99, 0x0E9F, 1},
+ {0x0EA1, 0x0EA3, 1},
+ {0x0EA5, 0x0EA7, 2},
+ {0x0EAA, 0x0EAB, 1},
+ {0x0EAD, 0x0EAE, 1},
+ {0x0EB0, 0x0EB0, 1},
+ {0x0EB2, 0x0EB3, 1},
+ {0x0EBD, 0x0EBD, 1},
+ {0x0EC0, 0x0EC4, 1},
+ {0x0F40, 0x0F47, 1},
+ {0x0F49, 0x0F69, 1},
+ {0x10A0, 0x10C5, 1},
+ {0x10D0, 0x10F6, 1},
+ {0x1100, 0x1100, 1},
+ {0x1102, 0x1103, 1},
+ {0x1105, 0x1107, 1},
+ {0x1109, 0x1109, 1},
+ {0x110B, 0x110C, 1},
+ {0x110E, 0x1112, 1},
+ {0x113C, 0x1140, 2},
+ {0x114C, 0x1150, 2},
+ {0x1154, 0x1155, 1},
+ {0x1159, 0x1159, 1},
+ {0x115F, 0x1161, 1},
+ {0x1163, 0x1169, 2},
+ {0x116D, 0x116E, 1},
+ {0x1172, 0x1173, 1},
+ {0x1175, 0x119E, 0x119E - 0x1175},
+ {0x11A8, 0x11AB, 0x11AB - 0x11A8},
+ {0x11AE, 0x11AF, 1},
+ {0x11B7, 0x11B8, 1},
+ {0x11BA, 0x11BA, 1},
+ {0x11BC, 0x11C2, 1},
+ {0x11EB, 0x11F0, 0x11F0 - 0x11EB},
+ {0x11F9, 0x11F9, 1},
+ {0x1E00, 0x1E9B, 1},
+ {0x1EA0, 0x1EF9, 1},
+ {0x1F00, 0x1F15, 1},
+ {0x1F18, 0x1F1D, 1},
+ {0x1F20, 0x1F45, 1},
+ {0x1F48, 0x1F4D, 1},
+ {0x1F50, 0x1F57, 1},
+ {0x1F59, 0x1F5B, 0x1F5B - 0x1F59},
+ {0x1F5D, 0x1F5D, 1},
+ {0x1F5F, 0x1F7D, 1},
+ {0x1F80, 0x1FB4, 1},
+ {0x1FB6, 0x1FBC, 1},
+ {0x1FBE, 0x1FBE, 1},
+ {0x1FC2, 0x1FC4, 1},
+ {0x1FC6, 0x1FCC, 1},
+ {0x1FD0, 0x1FD3, 1},
+ {0x1FD6, 0x1FDB, 1},
+ {0x1FE0, 0x1FEC, 1},
+ {0x1FF2, 0x1FF4, 1},
+ {0x1FF6, 0x1FFC, 1},
+ {0x2126, 0x2126, 1},
+ {0x212A, 0x212B, 1},
+ {0x212E, 0x212E, 1},
+ {0x2180, 0x2182, 1},
+ {0x3007, 0x3007, 1},
+ {0x3021, 0x3029, 1},
+ {0x3041, 0x3094, 1},
+ {0x30A1, 0x30FA, 1},
+ {0x3105, 0x312C, 1},
+ {0x4E00, 0x9FA5, 1},
+ {0xAC00, 0xD7A3, 1},
+ },
+}
+
+var second = &unicode.RangeTable{
+ R16: []unicode.Range16{
+ {0x002D, 0x002E, 1},
+ {0x0030, 0x0039, 1},
+ {0x00B7, 0x00B7, 1},
+ {0x02D0, 0x02D1, 1},
+ {0x0300, 0x0345, 1},
+ {0x0360, 0x0361, 1},
+ {0x0387, 0x0387, 1},
+ {0x0483, 0x0486, 1},
+ {0x0591, 0x05A1, 1},
+ {0x05A3, 0x05B9, 1},
+ {0x05BB, 0x05BD, 1},
+ {0x05BF, 0x05BF, 1},
+ {0x05C1, 0x05C2, 1},
+ {0x05C4, 0x0640, 0x0640 - 0x05C4},
+ {0x064B, 0x0652, 1},
+ {0x0660, 0x0669, 1},
+ {0x0670, 0x0670, 1},
+ {0x06D6, 0x06DC, 1},
+ {0x06DD, 0x06DF, 1},
+ {0x06E0, 0x06E4, 1},
+ {0x06E7, 0x06E8, 1},
+ {0x06EA, 0x06ED, 1},
+ {0x06F0, 0x06F9, 1},
+ {0x0901, 0x0903, 1},
+ {0x093C, 0x093C, 1},
+ {0x093E, 0x094C, 1},
+ {0x094D, 0x094D, 1},
+ {0x0951, 0x0954, 1},
+ {0x0962, 0x0963, 1},
+ {0x0966, 0x096F, 1},
+ {0x0981, 0x0983, 1},
+ {0x09BC, 0x09BC, 1},
+ {0x09BE, 0x09BF, 1},
+ {0x09C0, 0x09C4, 1},
+ {0x09C7, 0x09C8, 1},
+ {0x09CB, 0x09CD, 1},
+ {0x09D7, 0x09D7, 1},
+ {0x09E2, 0x09E3, 1},
+ {0x09E6, 0x09EF, 1},
+ {0x0A02, 0x0A3C, 0x3A},
+ {0x0A3E, 0x0A3F, 1},
+ {0x0A40, 0x0A42, 1},
+ {0x0A47, 0x0A48, 1},
+ {0x0A4B, 0x0A4D, 1},
+ {0x0A66, 0x0A6F, 1},
+ {0x0A70, 0x0A71, 1},
+ {0x0A81, 0x0A83, 1},
+ {0x0ABC, 0x0ABC, 1},
+ {0x0ABE, 0x0AC5, 1},
+ {0x0AC7, 0x0AC9, 1},
+ {0x0ACB, 0x0ACD, 1},
+ {0x0AE6, 0x0AEF, 1},
+ {0x0B01, 0x0B03, 1},
+ {0x0B3C, 0x0B3C, 1},
+ {0x0B3E, 0x0B43, 1},
+ {0x0B47, 0x0B48, 1},
+ {0x0B4B, 0x0B4D, 1},
+ {0x0B56, 0x0B57, 1},
+ {0x0B66, 0x0B6F, 1},
+ {0x0B82, 0x0B83, 1},
+ {0x0BBE, 0x0BC2, 1},
+ {0x0BC6, 0x0BC8, 1},
+ {0x0BCA, 0x0BCD, 1},
+ {0x0BD7, 0x0BD7, 1},
+ {0x0BE7, 0x0BEF, 1},
+ {0x0C01, 0x0C03, 1},
+ {0x0C3E, 0x0C44, 1},
+ {0x0C46, 0x0C48, 1},
+ {0x0C4A, 0x0C4D, 1},
+ {0x0C55, 0x0C56, 1},
+ {0x0C66, 0x0C6F, 1},
+ {0x0C82, 0x0C83, 1},
+ {0x0CBE, 0x0CC4, 1},
+ {0x0CC6, 0x0CC8, 1},
+ {0x0CCA, 0x0CCD, 1},
+ {0x0CD5, 0x0CD6, 1},
+ {0x0CE6, 0x0CEF, 1},
+ {0x0D02, 0x0D03, 1},
+ {0x0D3E, 0x0D43, 1},
+ {0x0D46, 0x0D48, 1},
+ {0x0D4A, 0x0D4D, 1},
+ {0x0D57, 0x0D57, 1},
+ {0x0D66, 0x0D6F, 1},
+ {0x0E31, 0x0E31, 1},
+ {0x0E34, 0x0E3A, 1},
+ {0x0E46, 0x0E46, 1},
+ {0x0E47, 0x0E4E, 1},
+ {0x0E50, 0x0E59, 1},
+ {0x0EB1, 0x0EB1, 1},
+ {0x0EB4, 0x0EB9, 1},
+ {0x0EBB, 0x0EBC, 1},
+ {0x0EC6, 0x0EC6, 1},
+ {0x0EC8, 0x0ECD, 1},
+ {0x0ED0, 0x0ED9, 1},
+ {0x0F18, 0x0F19, 1},
+ {0x0F20, 0x0F29, 1},
+ {0x0F35, 0x0F39, 2},
+ {0x0F3E, 0x0F3F, 1},
+ {0x0F71, 0x0F84, 1},
+ {0x0F86, 0x0F8B, 1},
+ {0x0F90, 0x0F95, 1},
+ {0x0F97, 0x0F97, 1},
+ {0x0F99, 0x0FAD, 1},
+ {0x0FB1, 0x0FB7, 1},
+ {0x0FB9, 0x0FB9, 1},
+ {0x20D0, 0x20DC, 1},
+ {0x20E1, 0x3005, 0x3005 - 0x20E1},
+ {0x302A, 0x302F, 1},
+ {0x3031, 0x3035, 1},
+ {0x3099, 0x309A, 1},
+ {0x309D, 0x309E, 1},
+ {0x30FC, 0x30FE, 1},
+ },
+}
diff --git a/vendor/github.com/antchfx/xpath/query.go b/vendor/github.com/antchfx/xpath/query.go
new file mode 100644
index 0000000..3bb5b5c
--- /dev/null
+++ b/vendor/github.com/antchfx/xpath/query.go
@@ -0,0 +1,791 @@
+package xpath
+
+import (
+ "reflect"
+)
+
+// iterator supplies the current context node during query execution.
+type iterator interface {
+	Current() NodeNavigator
+}
+
+// An XPath query interface.
+type query interface {
+	// Select traversing iterator returns a query matched node NodeNavigator.
+	Select(iterator) NodeNavigator
+
+	// Evaluate evaluates query and returns values of the current query.
+	Evaluate(iterator) interface{}
+
+	Clone() query
+}
+
+// contextQuery yields the iterator's current node (".") or, when Root
+// is set, the root of its tree ("/").
+type contextQuery struct {
+	count int  // ensures the node is produced only once per pass
+	Root  bool // Moving to root-level node in the current context iterator.
+}
+
+// Select returns a copy of the context node on the first call and nil
+// on every subsequent call.
+func (c *contextQuery) Select(t iterator) (n NodeNavigator) {
+	if c.count == 0 {
+		c.count++
+		n = t.Current().Copy()
+		if c.Root {
+			n.MoveToRoot()
+		}
+	}
+	return n
+}
+
+// Evaluate resets the query for a fresh pass and returns itself
+// (a node-set result).
+func (c *contextQuery) Evaluate(iterator) interface{} {
+	c.count = 0
+	return c
+}
+
+func (c *contextQuery) Clone() query {
+	return &contextQuery{count: 0, Root: c.Root}
+}
+
+// ancestorQuery is an XPath ancestor node query.(ancestor::*|ancestor-or-self::*)
+type ancestorQuery struct {
+	iterator func() NodeNavigator // in-flight traversal state
+
+	Self      bool // include the context node itself (ancestor-or-self)
+	Input     query
+	Predicate func(NodeNavigator) bool
+}
+
+// Select returns, one per call, each ancestor of every input node that
+// satisfies the predicate (preceded by the node itself when a.Self is
+// set). The walk climbs via MoveToParent until the root.
+func (a *ancestorQuery) Select(t iterator) NodeNavigator {
+	for {
+		if a.iterator == nil {
+			node := a.Input.Select(t)
+			if node == nil {
+				return nil
+			}
+			first := true
+			a.iterator = func() NodeNavigator {
+				if first && a.Self {
+					first = false
+					if a.Predicate(node) {
+						return node
+					}
+				}
+				// Fix: keep climbing past ancestors that fail the
+				// predicate instead of aborting on the first mismatch,
+				// so e.g. ancestor::x finds a matching ancestor anywhere
+				// up the chain, not only in an unbroken run of matches.
+				for node.MoveToParent() {
+					if a.Predicate(node) {
+						return node
+					}
+				}
+				return nil
+			}
+		}
+
+		if node := a.iterator(); node != nil {
+			return node
+		}
+		a.iterator = nil
+	}
+}
+
+// Evaluate prepares the query for a fresh pass and returns itself.
+func (a *ancestorQuery) Evaluate(t iterator) interface{} {
+	a.Input.Evaluate(t)
+	a.iterator = nil
+	return a
+}
+
+// Test reports whether n satisfies the node-test predicate.
+func (a *ancestorQuery) Test(n NodeNavigator) bool {
+	return a.Predicate(n)
+}
+
+func (a *ancestorQuery) Clone() query {
+	return &ancestorQuery{Self: a.Self, Input: a.Input.Clone(), Predicate: a.Predicate}
+}
+
+// attributeQuery is an XPath attribute node query.(@*)
+type attributeQuery struct {
+	iterator func() NodeNavigator // in-flight traversal state
+
+	Input     query
+	Predicate func(NodeNavigator) bool
+}
+
+// Select returns, one per call, each attribute of every input node that
+// satisfies the predicate.
+func (a *attributeQuery) Select(t iterator) NodeNavigator {
+	for {
+		if a.iterator == nil {
+			node := a.Input.Select(t)
+			if node == nil {
+				return nil
+			}
+			node = node.Copy()
+			a.iterator = func() NodeNavigator {
+				for {
+					onAttr := node.MoveToNextAttribute()
+					if !onAttr {
+						return nil
+					}
+					if a.Predicate(node) {
+						return node
+					}
+				}
+			}
+		}
+
+		if node := a.iterator(); node != nil {
+			return node
+		}
+		a.iterator = nil
+	}
+}
+
+// Evaluate prepares the query for a fresh pass and returns itself.
+func (a *attributeQuery) Evaluate(t iterator) interface{} {
+	a.Input.Evaluate(t)
+	a.iterator = nil
+	return a
+}
+
+// Test reports whether n satisfies the node-test predicate.
+func (a *attributeQuery) Test(n NodeNavigator) bool {
+	return a.Predicate(n)
+}
+
+func (a *attributeQuery) Clone() query {
+	return &attributeQuery{Input: a.Input.Clone(), Predicate: a.Predicate}
+}
+
+// childQuery is an XPath child node query.(child::*)
+type childQuery struct {
+	posit    int // 1-based position of the last returned child
+	iterator func() NodeNavigator
+
+	Input     query
+	Predicate func(NodeNavigator) bool
+}
+
+// Select returns, one per call, each child of every input node that
+// satisfies the predicate, tracking its position for predicates such
+// as [n].
+func (c *childQuery) Select(t iterator) NodeNavigator {
+	for {
+		if c.iterator == nil {
+			c.posit = 0
+			node := c.Input.Select(t)
+			if node == nil {
+				return nil
+			}
+			node = node.Copy()
+			first := true
+			c.iterator = func() NodeNavigator {
+				for {
+					// Descend to the first child once, then walk siblings.
+					if (first && !node.MoveToChild()) || (!first && !node.MoveToNext()) {
+						return nil
+					}
+					first = false
+					if c.Predicate(node) {
+						return node
+					}
+				}
+			}
+		}
+
+		if node := c.iterator(); node != nil {
+			c.posit++
+			return node
+		}
+		c.iterator = nil
+	}
+}
+
+// Evaluate prepares the query for a fresh pass and returns itself.
+func (c *childQuery) Evaluate(t iterator) interface{} {
+	c.Input.Evaluate(t)
+	c.iterator = nil
+	return c
+}
+
+// Test reports whether n satisfies the node-test predicate.
+func (c *childQuery) Test(n NodeNavigator) bool {
+	return c.Predicate(n)
+}
+
+func (c *childQuery) Clone() query {
+	return &childQuery{Input: c.Input.Clone(), Predicate: c.Predicate}
+}
+
+// position returns a position of current NodeNavigator.
+func (c *childQuery) position() int {
+	return c.posit
+}
+
+// descendantQuery is an XPath descendant node query.(descendant::* | descendant-or-self::*)
+type descendantQuery struct {
+	iterator func() NodeNavigator // in-flight traversal state
+	posit    int                  // 1-based position of the last returned node
+
+	Self      bool // include the context node itself (descendant-or-self)
+	Input     query
+	Predicate func(NodeNavigator) bool
+}
+
+// Select returns, one per call, each descendant of every input node
+// that satisfies the predicate, in document order (depth-first walk
+// tracked by level so the traversal never escapes the subtree).
+func (d *descendantQuery) Select(t iterator) NodeNavigator {
+	for {
+		if d.iterator == nil {
+			d.posit = 0
+			node := d.Input.Select(t)
+			if node == nil {
+				return nil
+			}
+			node = node.Copy()
+			level := 0 // depth below the starting node
+			first := true
+			d.iterator = func() NodeNavigator {
+				if first && d.Self {
+					first = false
+					if d.Predicate(node) {
+						return node
+					}
+				}
+
+				for {
+					if node.MoveToChild() {
+						level++
+					} else {
+						// No child: climb until a next sibling exists,
+						// stopping when back at the starting level.
+						for {
+							if level == 0 {
+								return nil
+							}
+							if node.MoveToNext() {
+								break
+							}
+							node.MoveToParent()
+							level--
+						}
+					}
+					if d.Predicate(node) {
+						return node
+					}
+				}
+			}
+		}
+
+		if node := d.iterator(); node != nil {
+			d.posit++
+			return node
+		}
+		d.iterator = nil
+	}
+}
+
+// Evaluate prepares the query for a fresh pass and returns itself.
+func (d *descendantQuery) Evaluate(t iterator) interface{} {
+	d.Input.Evaluate(t)
+	d.iterator = nil
+	return d
+}
+
+// Test reports whether n satisfies the node-test predicate.
+func (d *descendantQuery) Test(n NodeNavigator) bool {
+	return d.Predicate(n)
+}
+
+// position returns a position of current NodeNavigator.
+func (d *descendantQuery) position() int {
+	return d.posit
+}
+
+func (d *descendantQuery) Clone() query {
+	return &descendantQuery{Self: d.Self, Input: d.Input.Clone(), Predicate: d.Predicate}
+}
+
+// followingQuery is an XPath following node query.(following::*|following-sibling::*)
+type followingQuery struct {
+	iterator func() NodeNavigator // in-flight traversal state
+
+	Input     query
+	Sibling   bool // The matching sibling node of current node.
+	Predicate func(NodeNavigator) bool
+}
+
+// Select returns, one per call, each node after the input node in
+// document order: only siblings when f.Sibling is set, otherwise every
+// following subtree expanded via a descendant-or-self sub-query.
+func (f *followingQuery) Select(t iterator) NodeNavigator {
+	for {
+		if f.iterator == nil {
+			node := f.Input.Select(t)
+			if node == nil {
+				return nil
+			}
+			node = node.Copy()
+			if f.Sibling {
+				f.iterator = func() NodeNavigator {
+					for {
+						if !node.MoveToNext() {
+							return nil
+						}
+						if f.Predicate(node) {
+							return node
+						}
+					}
+				}
+			} else {
+				var q query // descendant query
+				f.iterator = func() NodeNavigator {
+					for {
+						if q == nil {
+							// Advance to the next sibling, climbing when a
+							// branch is exhausted, then scan that subtree.
+							for !node.MoveToNext() {
+								if !node.MoveToParent() {
+									return nil
+								}
+							}
+							q = &descendantQuery{
+								Self:      true,
+								Input:     &contextQuery{},
+								Predicate: f.Predicate,
+							}
+							t.Current().MoveTo(node)
+						}
+						if node := q.Select(t); node != nil {
+							return node
+						}
+						q = nil
+					}
+				}
+			}
+		}
+
+		if node := f.iterator(); node != nil {
+			return node
+		}
+		f.iterator = nil
+	}
+}
+
+// Evaluate prepares the query for a fresh pass and returns itself.
+// Fix: reset the in-flight iterator, matching the other axis queries
+// (ancestor/attribute/child/descendant), so a stale traversal from a
+// previous pass cannot leak into this one.
+func (f *followingQuery) Evaluate(t iterator) interface{} {
+	f.Input.Evaluate(t)
+	f.iterator = nil
+	return f
+}
+
+// Test reports whether n satisfies the node-test predicate.
+func (f *followingQuery) Test(n NodeNavigator) bool {
+	return f.Predicate(n)
+}
+
+func (f *followingQuery) Clone() query {
+	return &followingQuery{Input: f.Input.Clone(), Sibling: f.Sibling, Predicate: f.Predicate}
+}
+
+// precedingQuery is an XPath preceding node query.(preceding::*)
+type precedingQuery struct {
+	iterator  func() NodeNavigator // in-flight traversal state
+	Input     query
+	Sibling   bool // The matching sibling node of current node.
+	Predicate func(NodeNavigator) bool
+}
+
+// Select returns, one per call, each node before the input node: only
+// siblings when p.Sibling is set, otherwise every preceding subtree
+// expanded via a descendant-or-self sub-query.
+func (p *precedingQuery) Select(t iterator) NodeNavigator {
+	for {
+		if p.iterator == nil {
+			node := p.Input.Select(t)
+			if node == nil {
+				return nil
+			}
+			node = node.Copy()
+			if p.Sibling {
+				p.iterator = func() NodeNavigator {
+					for {
+						// The loop body always returns, so this "for"
+						// behaves as an if: stop at the first sibling.
+						for !node.MoveToPrevious() {
+							return nil
+						}
+						if p.Predicate(node) {
+							return node
+						}
+					}
+				}
+			} else {
+				var q query
+				p.iterator = func() NodeNavigator {
+					for {
+						if q == nil {
+							// Step to the previous sibling, climbing when a
+							// branch is exhausted, then scan that subtree.
+							for !node.MoveToPrevious() {
+								if !node.MoveToParent() {
+									return nil
+								}
+							}
+							q = &descendantQuery{
+								Self:      true,
+								Input:     &contextQuery{},
+								Predicate: p.Predicate,
+							}
+							t.Current().MoveTo(node)
+						}
+						if node := q.Select(t); node != nil {
+							return node
+						}
+						q = nil
+					}
+				}
+			}
+		}
+		if node := p.iterator(); node != nil {
+			return node
+		}
+		p.iterator = nil
+	}
+}
+
+// Evaluate prepares the query for a fresh pass and returns itself.
+// Fix: reset the in-flight iterator, matching the other axis queries
+// (ancestor/attribute/child/descendant), so a stale traversal from a
+// previous pass cannot leak into this one.
+func (p *precedingQuery) Evaluate(t iterator) interface{} {
+	p.Input.Evaluate(t)
+	p.iterator = nil
+	return p
+}
+
+// Test reports whether n satisfies the node-test predicate.
+func (p *precedingQuery) Test(n NodeNavigator) bool {
+	return p.Predicate(n)
+}
+
+func (p *precedingQuery) Clone() query {
+	return &precedingQuery{Input: p.Input.Clone(), Sibling: p.Sibling, Predicate: p.Predicate}
+}
+
+// parentQuery is an XPath parent node query.(parent::*)
+type parentQuery struct {
+	Input     query
+	Predicate func(NodeNavigator) bool
+}
+
+// Select returns, one per call, the parent of each input node when that
+// parent satisfies the predicate; inputs whose parent fails the test
+// (or that have no parent) are skipped.
+func (p *parentQuery) Select(t iterator) NodeNavigator {
+	for {
+		node := p.Input.Select(t)
+		if node == nil {
+			return nil
+		}
+		node = node.Copy()
+		if node.MoveToParent() && p.Predicate(node) {
+			return node
+		}
+	}
+}
+
+// Evaluate evaluates the input and returns itself (a node-set).
+func (p *parentQuery) Evaluate(t iterator) interface{} {
+	p.Input.Evaluate(t)
+	return p
+}
+
+func (p *parentQuery) Clone() query {
+	return &parentQuery{Input: p.Input.Clone(), Predicate: p.Predicate}
+}
+
+// Test reports whether n satisfies the node-test predicate.
+func (p *parentQuery) Test(n NodeNavigator) bool {
+	return p.Predicate(n)
+}
+
+// selfQuery is an Self node query.(self::*)
+type selfQuery struct {
+	Input     query
+	Predicate func(NodeNavigator) bool
+}
+
+// Select returns, one per call, each input node that satisfies the
+// predicate.
+func (s *selfQuery) Select(t iterator) NodeNavigator {
+	for {
+		node := s.Input.Select(t)
+		if node == nil {
+			return nil
+		}
+
+		if s.Predicate(node) {
+			return node
+		}
+	}
+}
+
+// Evaluate evaluates the input and returns itself (a node-set).
+func (s *selfQuery) Evaluate(t iterator) interface{} {
+	s.Input.Evaluate(t)
+	return s
+}
+
+// Test reports whether n satisfies the node-test predicate.
+func (s *selfQuery) Test(n NodeNavigator) bool {
+	return s.Predicate(n)
+}
+
+func (s *selfQuery) Clone() query {
+	return &selfQuery{Input: s.Input.Clone(), Predicate: s.Predicate}
+}
+
+// filterQuery is an XPath query for predicate filter.
+type filterQuery struct {
+	Input     query
+	Predicate query
+}
+
+// do evaluates the predicate against the current context node and
+// converts the result to a boolean: bool as-is, string by non-empty,
+// float64 by comparing against the input's current position (the [n]
+// form), and any other query by whether it selects at least one node.
+func (f *filterQuery) do(t iterator) bool {
+	val := reflect.ValueOf(f.Predicate.Evaluate(t))
+	switch val.Kind() {
+	case reflect.Bool:
+		return val.Bool()
+	case reflect.String:
+		return len(val.String()) > 0
+	case reflect.Float64:
+		pt := float64(getNodePosition(f.Input))
+		return int(val.Float()) == int(pt)
+	default:
+		// Predicate is statically of type query, so this assertion is
+		// expected to succeed; kept as a guard.
+		if q, ok := f.Predicate.(query); ok {
+			return q.Select(t) != nil
+		}
+	}
+	return false
+}
+
+// Select returns, one per call, each input node for which the predicate
+// holds, moving the shared context to the candidate before testing.
+func (f *filterQuery) Select(t iterator) NodeNavigator {
+	for {
+		node := f.Input.Select(t)
+		if node == nil {
+			return node
+		}
+		node = node.Copy()
+
+		t.Current().MoveTo(node)
+		if f.do(t) {
+			return node
+		}
+	}
+}
+
+// Evaluate evaluates the input and returns itself (a node-set).
+func (f *filterQuery) Evaluate(t iterator) interface{} {
+	f.Input.Evaluate(t)
+	return f
+}
+
+func (f *filterQuery) Clone() query {
+	return &filterQuery{Input: f.Input.Clone(), Predicate: f.Predicate.Clone()}
+}
+
+// functionQuery is an XPath function that call a function to returns
+// value of current NodeNavigator node.
+type functionQuery struct {
+	Input query                             // Node Set
+	Func  func(query, iterator) interface{} // The xpath function.
+}
+
+// Select always returns nil: a function query produces a value, not a
+// node set.
+func (f *functionQuery) Select(t iterator) NodeNavigator {
+	return nil
+}
+
+// Evaluate call a specified function that will returns the
+// following value type: number,string,boolean.
+func (f *functionQuery) Evaluate(t iterator) interface{} {
+	return f.Func(f.Input, t)
+}
+
+func (f *functionQuery) Clone() query {
+	return &functionQuery{Input: f.Input.Clone(), Func: f.Func}
+}
+
+// constantQuery is an XPath constant operand.
+type constantQuery struct {
+	Val interface{}
+}
+
+// Select always returns nil: a constant is a value, not a node set.
+func (c *constantQuery) Select(t iterator) NodeNavigator {
+	return nil
+}
+
+func (c *constantQuery) Evaluate(t iterator) interface{} {
+	return c.Val
+}
+
+// Clone returns the receiver itself; constants are immutable and safe
+// to share.
+func (c *constantQuery) Clone() query {
+	return c
+}
+
+// logicalQuery is an XPath logical expression (comparison operators).
+type logicalQuery struct {
+	Left, Right query
+
+	Do func(iterator, interface{}, interface{}) interface{}
+}
+
+// Select returns the current context node when the expression
+// evaluates to true, and nil otherwise.
+func (l *logicalQuery) Select(t iterator) NodeNavigator {
+	// When a XPath expr is logical expression.
+	node := t.Current().Copy()
+	val := l.Evaluate(t)
+	switch val.(type) {
+	case bool:
+		if val.(bool) == true {
+			return node
+		}
+	}
+	return nil
+}
+
+// Evaluate applies Do to the evaluated left and right operands.
+func (l *logicalQuery) Evaluate(t iterator) interface{} {
+	m := l.Left.Evaluate(t)
+	n := l.Right.Evaluate(t)
+	return l.Do(t, m, n)
+}
+
+func (l *logicalQuery) Clone() query {
+	return &logicalQuery{Left: l.Left.Clone(), Right: l.Right.Clone(), Do: l.Do}
+}
+
+// numericQuery is an XPath numeric operator expression.
+type numericQuery struct {
+	Left, Right query
+
+	Do func(interface{}, interface{}) interface{}
+}
+
+// Select always returns nil: an arithmetic expression produces a value,
+// not a node set.
+func (n *numericQuery) Select(t iterator) NodeNavigator {
+	return nil
+}
+
+// Evaluate applies Do to the evaluated left and right operands.
+func (n *numericQuery) Evaluate(t iterator) interface{} {
+	m := n.Left.Evaluate(t)
+	k := n.Right.Evaluate(t)
+	return n.Do(m, k)
+}
+
+func (n *numericQuery) Clone() query {
+	return &numericQuery{Left: n.Left.Clone(), Right: n.Right.Clone(), Do: n.Do}
+}
+
+// booleanQuery is an XPath "and"/"or" expression over two node sets.
+type booleanQuery struct {
+	IsOr        bool
+	Left, Right query
+	iterator    func() NodeNavigator // replays the buffered result set
+}
+
+// Select evaluates the boolean node-set expression: for "or" it yields
+// the concatenation of both sides' results; for "and" it yields nodes
+// present in both sides' results. The result is buffered on the first
+// call and replayed one node per call.
+func (b *booleanQuery) Select(t iterator) NodeNavigator {
+	if b.iterator == nil {
+		var list []NodeNavigator
+		i := 0
+		root := t.Current().Copy()
+		if b.IsOr {
+			for {
+				node := b.Left.Select(t)
+				if node == nil {
+					break
+				}
+				node = node.Copy()
+				list = append(list, node)
+			}
+			t.Current().MoveTo(root)
+			for {
+				node := b.Right.Select(t)
+				if node == nil {
+					break
+				}
+				node = node.Copy()
+				list = append(list, node)
+			}
+		} else {
+			var m []NodeNavigator
+			var n []NodeNavigator
+			for {
+				node := b.Left.Select(t)
+				if node == nil {
+					break
+				}
+				node = node.Copy()
+				// Fix: accumulate into m (was "list = append(m, node)",
+				// which left m empty and discarded every appended node,
+				// so the intersection below was always empty).
+				m = append(m, node)
+			}
+			t.Current().MoveTo(root)
+			for {
+				node := b.Right.Select(t)
+				if node == nil {
+					break
+				}
+				node = node.Copy()
+				// Fix: accumulate into n (was "list = append(n, node)").
+				n = append(n, node)
+			}
+			for _, k := range m {
+				for _, j := range n {
+					// NOTE(review): interface equality compares the copied
+					// navigator values, not document positions — confirm
+					// that two copies of the same node compare equal for
+					// the concrete NodeNavigator in use.
+					if k == j {
+						list = append(list, k)
+					}
+				}
+			}
+		}
+
+		b.iterator = func() NodeNavigator {
+			if i >= len(list) {
+				return nil
+			}
+			node := list[i]
+			i++
+			return node
+		}
+	}
+	return b.iterator()
+}
+
+// Evaluate computes the boolean value with short-circuiting: "or"
+// stops at a true left side, "and" at a false one.
+func (b *booleanQuery) Evaluate(t iterator) interface{} {
+	m := b.Left.Evaluate(t)
+	left := asBool(t, m)
+	if b.IsOr && left {
+		return true
+	} else if !b.IsOr && !left {
+		return false
+	}
+	m = b.Right.Evaluate(t)
+	return asBool(t, m)
+}
+
+func (b *booleanQuery) Clone() query {
+	return &booleanQuery{IsOr: b.IsOr, Left: b.Left.Clone(), Right: b.Right.Clone()}
+}
+
+// unionQuery is an XPath union expression ("|"): both sides' results
+// concatenated, with right-side duplicates of left-side nodes removed.
+type unionQuery struct {
+	Left, Right query
+	iterator    func() NodeNavigator // replays the buffered result set
+}
+
+// Select buffers the union on the first call and replays it one node
+// per call. Deduplication uses reflect.DeepEqual against the left
+// side's results.
+func (u *unionQuery) Select(t iterator) NodeNavigator {
+	if u.iterator == nil {
+		var list []NodeNavigator
+		var i int
+		root := t.Current().Copy()
+		for {
+			node := u.Left.Select(t)
+			if node == nil {
+				break
+			}
+			node = node.Copy()
+			list = append(list, node)
+		}
+		t.Current().MoveTo(root)
+		for {
+			node := u.Right.Select(t)
+			if node == nil {
+				break
+			}
+			node = node.Copy()
+			var exists bool
+			for _, x := range list {
+				if reflect.DeepEqual(x, node) {
+					exists = true
+					break
+				}
+			}
+			if !exists {
+				list = append(list, node)
+			}
+		}
+		u.iterator = func() NodeNavigator {
+			if i >= len(list) {
+				return nil
+			}
+			node := list[i]
+			i++
+			return node
+		}
+	}
+	return u.iterator()
+}
+
+// Evaluate resets the buffered result, evaluates both sides, and
+// returns itself (a node-set).
+func (u *unionQuery) Evaluate(t iterator) interface{} {
+	u.iterator = nil
+	u.Left.Evaluate(t)
+	u.Right.Evaluate(t)
+	return u
+}
+
+func (u *unionQuery) Clone() query {
+	return &unionQuery{Left: u.Left.Clone(), Right: u.Right.Clone()}
+}
+
+// getNodePosition returns the current position reported by queries that
+// track one (childQuery/descendantQuery via position()), and 1 for all
+// other query kinds.
+func getNodePosition(q query) int {
+	type Position interface {
+		position() int
+	}
+	if count, ok := q.(Position); ok {
+		return count.position()
+	}
+	return 1
+}
diff --git a/vendor/github.com/antchfx/xpath/xpath.go b/vendor/github.com/antchfx/xpath/xpath.go
new file mode 100644
index 0000000..7e3f52c
--- /dev/null
+++ b/vendor/github.com/antchfx/xpath/xpath.go
@@ -0,0 +1,157 @@
+package xpath
+
+import (
+ "errors"
+)
+
+// NodeType represents a type of XPath node.
+type NodeType int
+
+const (
+	// RootNode is a root node of the XML document or node tree.
+	RootNode NodeType = iota
+
+	// ElementNode is an element, such as <element>.
+	ElementNode
+
+	// AttributeNode is an attribute, such as id='123'.
+	AttributeNode
+
+	// TextNode is the text content of a node.
+	TextNode
+
+	// CommentNode is a comment node, such as <!-- my comment -->.
+	CommentNode
+
+	// allNode is any types of node, used by xpath package only to predicate match.
+	allNode
+)
+
+// NodeNavigator provides cursor model for navigating XML data.
+type NodeNavigator interface {
+	// NodeType returns the XPathNodeType of the current node.
+	NodeType() NodeType
+
+	// LocalName gets the Name of the current node.
+	LocalName() string
+
+	// Prefix returns namespace prefix associated with the current node.
+	Prefix() string
+
+	// Value gets the value of current node.
+	Value() string
+
+	// Copy does a deep copy of the NodeNavigator and all its components.
+	Copy() NodeNavigator
+
+	// MoveToRoot moves the NodeNavigator to the root node of the current node.
+	MoveToRoot()
+
+	// MoveToParent moves the NodeNavigator to the parent node of the current node.
+	MoveToParent() bool
+
+	// MoveToNextAttribute moves the NodeNavigator to the next attribute on current node.
+	MoveToNextAttribute() bool
+
+	// MoveToChild moves the NodeNavigator to the first child node of the current node.
+	MoveToChild() bool
+
+	// MoveToFirst moves the NodeNavigator to the first sibling node of the current node.
+	MoveToFirst() bool
+
+	// MoveToNext moves the NodeNavigator to the next sibling node of the current node.
+	MoveToNext() bool
+
+	// MoveToPrevious moves the NodeNavigator to the previous sibling node of the current node.
+	MoveToPrevious() bool
+
+	// MoveTo moves the NodeNavigator to the same position as the specified NodeNavigator.
+	MoveTo(NodeNavigator) bool
+}
+
+// NodeIterator holds all matched Node object.
+type NodeIterator struct {
+	node  NodeNavigator
+	query query
+}
+
+// Current returns current node which matched.
+func (t *NodeIterator) Current() NodeNavigator {
+	return t.node
+}
+
+// MoveNext moves Navigator to the next match node. It reuses the
+// existing navigator via MoveTo when possible, copying only as a
+// fallback.
+func (t *NodeIterator) MoveNext() bool {
+	n := t.query.Select(t)
+	if n != nil {
+		if !t.node.MoveTo(n) {
+			t.node = n.Copy()
+		}
+		return true
+	}
+	return false
+}
+
+// Select selects a node set using the specified XPath expression.
+// It panics if the expression does not compile.
+//
+// Deprecated: use Compile and Expr.Select instead.
+func Select(root NodeNavigator, expr string) *NodeIterator {
+	exp, err := Compile(expr)
+	if err != nil {
+		panic(err)
+	}
+	return exp.Select(root)
+}
+
+// Expr is an XPath expression for query.
+type Expr struct {
+	s string // original expression text
+	q query  // compiled query tree
+}
+
+// iteratorFunc adapts a plain function to the iterator interface.
+type iteratorFunc func() NodeNavigator
+
+func (f iteratorFunc) Current() NodeNavigator {
+	return f()
+}
+
+// Evaluate returns the result of the expression.
+// The result type is one of the following: bool, float64, string, or
+// *NodeIterator (when the expression selects a node set).
+func (expr *Expr) Evaluate(root NodeNavigator) interface{} {
+	val := expr.q.Evaluate(iteratorFunc(func() NodeNavigator { return root }))
+	switch val.(type) {
+	case query:
+		return &NodeIterator{query: expr.q.Clone(), node: root}
+	}
+	return val
+}
+
+// Select selects a node set using the specified XPath expression.
+func (expr *Expr) Select(root NodeNavigator) *NodeIterator {
+	return &NodeIterator{query: expr.q.Clone(), node: root}
+}
+
+// String returns XPath expression string.
+func (expr *Expr) String() string {
+	return expr.s
+}
+
+// Compile compiles an XPath expression string. An empty expression is
+// rejected with an error.
+func Compile(expr string) (*Expr, error) {
+	if expr == "" {
+		return nil, errors.New("expr expression is nil")
+	}
+	qy, err := build(expr)
+	if err != nil {
+		return nil, err
+	}
+	return &Expr{s: expr, q: qy}, nil
+}
+
+// MustCompile is like Compile but, unlike the usual Must convention,
+// returns nil (rather than panicking) when the expression fails to
+// compile.
+func MustCompile(expr string) *Expr {
+	exp, err := Compile(expr)
+	if err != nil {
+		return nil
+	}
+	return exp
+}
diff --git a/vendor/github.com/antchfx/xpath/xpath_test.go b/vendor/github.com/antchfx/xpath/xpath_test.go
new file mode 100644
index 0000000..5f48ab2
--- /dev/null
+++ b/vendor/github.com/antchfx/xpath/xpath_test.go
@@ -0,0 +1,631 @@
+package xpath
+
+import (
+ "bytes"
+ "strings"
+ "testing"
+)
+
+// html is the shared fixture document that every test below queries.
+var html *TNode = example()
+
+// TestCompile verifies that well-formed expressions compile and that a
+// malformed one (unterminated string literal) is rejected.
+func TestCompile(t *testing.T) {
+ var err error
+ _, err = Compile("//a")
+ if err != nil {
+ t.Fatalf("//a should be correct but got error %s", err)
+ }
+ _, err = Compile("//a[id=']/span")
+ if err == nil {
+ t.Fatal("//a[id=] should be got correct but is nil")
+ }
+ _, err = Compile("//ul/li/@class")
+ if err != nil {
+ t.Fatalf("//ul/li/@class should be correct but got error %s", err)
+ }
+}
+
+// TestSelf exercises the "." abbreviation and the self:: axis.
+func TestSelf(t *testing.T) {
+ testXPath(t, html, ".", "html")
+ testXPath(t, html.FirstChild, ".", "head")
+ testXPath(t, html, "self::*", "html")
+ testXPath(t, html.LastChild, "self::body", "body")
+ testXPath2(t, html, "//body/./ul/li/a", 3)
+}
+
+// TestParent exercises the ".." abbreviation and the parent:: axis.
+func TestParent(t *testing.T) {
+ testXPath(t, html.LastChild, "..", "html")
+ testXPath(t, html.LastChild, "parent::*", "html")
+ a := selectNode(html, "//li/a")
+ testXPath(t, a, "parent::*", "li")
+ testXPath(t, html, "//title/parent::head", "head")
+}
+
+// TestAttribute exercises attribute selection (@name, @*) and
+// attribute-equality predicates.
+func TestAttribute(t *testing.T) {
+ testXPath(t, html, "@lang='en'", "html")
+ testXPath2(t, html, "@lang='zh'", 0)
+ testXPath2(t, html, "//@href", 3)
+ testXPath2(t, html, "//a[@*]", 3)
+}
+
+// TestRelativePath exercises relative steps, // descendant shorthand,
+// and mixing .. with element steps.
+func TestRelativePath(t *testing.T) {
+ testXPath(t, html, "head", "head")
+ testXPath(t, html, "/head", "head")
+ testXPath(t, html, "body//li", "li")
+ testXPath(t, html, "/head/title", "title")
+
+ testXPath2(t, html, "/body/ul/li/a", 3)
+ testXPath(t, html, "//title", "title")
+ testXPath(t, html, "//title/..", "head")
+ testXPath(t, html, "//title/../..", "html")
+ testXPath2(t, html, "//a[@href]", 3)
+ testXPath(t, html, "//ul/../footer", "footer")
+}
+
+// TestChild exercises the explicit child:: axis.
+func TestChild(t *testing.T) {
+ testXPath(t, html, "/child::head", "head")
+ testXPath(t, html, "/child::head/child::title", "title")
+ testXPath(t, html, "//title/../child::title", "title")
+ testXPath(t, html.Parent, "//child::*", "html")
+}
+
+func TestDescendant(t *testing.T) {
+ testXPath2(t, html, "descendant::*", 15)
+ testXPath2(t, html, "/head/descendant::*", 2)
+ testXPath2(t, html, "//ul/descendant::*", 7) //
tag")
+ }
+
+ if paragraphCallbackCount != 2 {
+ t.Error("Failed to find all
tags")
+ }
+}
+
+// BenchmarkOnHTML measures a full visit + HTML-callback dispatch per
+// iteration against a local test server; the ?q=n query defeats the
+// visited-URL cache so every iteration performs a real request.
+func BenchmarkOnHTML(b *testing.B) {
+ ts := newTestServer()
+ defer ts.Close()
+
+ c := NewCollector()
+ c.OnHTML("p", func(_ *HTMLElement) {})
+
+ for n := 0; n < b.N; n++ {
+ c.Visit(fmt.Sprintf("%s/html?q=%d", ts.URL, n))
+ }
+}
+
+// BenchmarkOnXML is the XPath-callback counterpart of BenchmarkOnHTML,
+// using the same per-iteration unique URL trick.
+func BenchmarkOnXML(b *testing.B) {
+ ts := newTestServer()
+ defer ts.Close()
+
+ c := NewCollector()
+ c.OnXML("//p", func(_ *XMLElement) {})
+
+ for n := 0; n < b.N; n++ {
+ c.Visit(fmt.Sprintf("%s/html?q=%d", ts.URL, n))
+ }
+}
+
+// BenchmarkOnResponse measures raw response-callback dispatch; it relies
+// on AllowURLRevisit instead of unique URLs to repeat the same request.
+func BenchmarkOnResponse(b *testing.B) {
+ ts := newTestServer()
+ defer ts.Close()
+
+ c := NewCollector()
+ c.AllowURLRevisit = true
+ c.OnResponse(func(_ *Response) {})
+
+ for n := 0; n < b.N; n++ {
+ c.Visit(ts.URL)
+ }
+}
diff --git a/vendor/github.com/gocolly/colly/context.go b/vendor/github.com/gocolly/colly/context.go
new file mode 100644
index 0000000..4bc11b9
--- /dev/null
+++ b/vendor/github.com/gocolly/colly/context.go
@@ -0,0 +1,87 @@
+// Copyright 2018 Adam Tauber
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package colly
+
+import (
+ "sync"
+)
+
+// Context provides a tiny layer for passing data between callbacks
+type Context struct {
+ contextMap map[string]interface{} // key/value store guarded by lock
+ lock *sync.RWMutex
+}
+
+// NewContext initializes a new Context instance
+func NewContext() *Context {
+ return &Context{
+ contextMap: make(map[string]interface{}),
+ lock: &sync.RWMutex{},
+ }
+}
+
+// UnmarshalBinary decodes Context value to nil
+// This function is used by request caching: context data is deliberately
+// not restored from the cache.
+func (c *Context) UnmarshalBinary(_ []byte) error {
+ return nil
+}
+
+// MarshalBinary encodes Context value
+// This function is used by request caching: context data is deliberately
+// not persisted to the cache.
+func (c *Context) MarshalBinary() (_ []byte, _ error) {
+ return nil, nil
+}
+
+// Put stores a value of any type in Context.
+// An existing value under the same key is overwritten.
+func (c *Context) Put(key string, value interface{}) {
+ c.lock.Lock()
+ c.contextMap[key] = value
+ c.lock.Unlock()
+}
+
+// Get retrieves a string value from Context.
+// Get returns an empty string if key not found.
+// NOTE(review): the type assertion below is unchecked, so Get panics if
+// the stored value is not a string — use GetAny for non-string values.
+func (c *Context) Get(key string) string {
+ c.lock.RLock()
+ defer c.lock.RUnlock()
+ if v, ok := c.contextMap[key]; ok {
+ return v.(string)
+ }
+ return ""
+}
+
+// GetAny retrieves a value from Context.
+// GetAny returns nil if key not found
+func (c *Context) GetAny(key string) interface{} {
+ c.lock.RLock()
+ defer c.lock.RUnlock()
+ if v, ok := c.contextMap[key]; ok {
+ return v
+ }
+ return nil
+}
+
+// ForEach calls fn for every key/value pair held in the Context and
+// returns the collected results. Iteration order follows Go map order
+// and is therefore unspecified.
+func (c *Context) ForEach(fn func(k string, v interface{}) interface{}) []interface{} {
+ c.lock.RLock()
+ defer c.lock.RUnlock()
+
+ ret := make([]interface{}, 0, len(c.contextMap))
+ for k, v := range c.contextMap {
+ ret = append(ret, fn(k, v))
+ }
+
+ return ret
+}
diff --git a/vendor/github.com/gocolly/colly/context_test.go b/vendor/github.com/gocolly/colly/context_test.go
new file mode 100644
index 0000000..07d7d85
--- /dev/null
+++ b/vendor/github.com/gocolly/colly/context_test.go
@@ -0,0 +1,39 @@
+// Copyright 2018 Adam Tauber
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package colly
+
+import (
+ "strconv"
+ "testing"
+)
+
+// TestContextIteration stores ten int values, iterates them back out via
+// ForEach, and cross-checks each against GetAny.
+func TestContextIteration(t *testing.T) {
+ ctx := NewContext()
+ for i := 0; i < 10; i++ {
+ ctx.Put(strconv.Itoa(i), i)
+ }
+ values := ctx.ForEach(func(k string, v interface{}) interface{} {
+ return v.(int)
+ })
+ if len(values) != 10 {
+ t.Fatal("fail to iterate context")
+ }
+ for _, i := range values {
+ v := i.(int)
+ if v != ctx.GetAny(strconv.Itoa(v)).(int) {
+ t.Fatal("value not equal")
+ }
+ }
+}
diff --git a/vendor/github.com/gocolly/colly/debug/debug.go b/vendor/github.com/gocolly/colly/debug/debug.go
new file mode 100644
index 0000000..705d0f7
--- /dev/null
+++ b/vendor/github.com/gocolly/colly/debug/debug.go
@@ -0,0 +1,36 @@
+// Copyright 2018 Adam Tauber
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package debug
+
+// Event represents an action inside a collector
+type Event struct {
+ // Type is the type of the event
+ Type string
+ // RequestID identifies the HTTP request of the Event
+ RequestID uint32
+ // CollectorID identifies the collector of the Event
+ CollectorID uint32
+ // Values contains the event's key-value pairs. Different type of events
+ // can return different key-value pairs
+ Values map[string]string
+}
+
+// Debugger is an interface for different type of debugging backends
+type Debugger interface {
+ // Init initializes the backend
+ Init() error
+ // Event receives a new collector event.
+ Event(e *Event)
+}
diff --git a/vendor/github.com/gocolly/colly/debug/logdebugger.go b/vendor/github.com/gocolly/colly/debug/logdebugger.go
new file mode 100644
index 0000000..f866b6d
--- /dev/null
+++ b/vendor/github.com/gocolly/colly/debug/logdebugger.go
@@ -0,0 +1,54 @@
+// Copyright 2018 Adam Tauber
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package debug
+
+import (
+ "io"
+ "log"
+ "os"
+ "sync/atomic"
+ "time"
+)
+
+// LogDebugger is the simplest debugger which prints log messages to the STDERR
+type LogDebugger struct {
+ // Output is the log destination; any io.Writer implementation can be
+ // used. Leave it blank to use STDERR.
+ Output io.Writer
+ // Prefix appears at the beginning of each generated log line
+ Prefix string
+ // Flag defines the logging properties.
+ Flag int
+ logger *log.Logger
+ counter int32 // event sequence number, incremented atomically in Event
+ start time.Time // init time; Event reports elapsed time since this
+}
+
+// Init initializes the LogDebugger, defaulting Output to os.Stderr when
+// unset and resetting the event counter and start timestamp.
+func (l *LogDebugger) Init() error {
+ l.counter = 0
+ l.start = time.Now()
+ if l.Output == nil {
+ l.Output = os.Stderr
+ }
+ l.logger = log.New(l.Output, l.Prefix, l.Flag)
+ return nil
+}
+
+// Event receives Collector events and prints them to STDERR.
+// The counter is advanced with atomic.AddInt32, so concurrent events get
+// distinct sequence numbers.
+func (l *LogDebugger) Event(e *Event) {
+ i := atomic.AddInt32(&l.counter, 1)
+ l.logger.Printf("[%06d] %d [%6d - %s] %q (%s)\n", i, e.CollectorID, e.RequestID, e.Type, e.Values, time.Since(l.start))
+}
diff --git a/vendor/github.com/gocolly/colly/debug/webdebugger.go b/vendor/github.com/gocolly/colly/debug/webdebugger.go
new file mode 100644
index 0000000..e246361
--- /dev/null
+++ b/vendor/github.com/gocolly/colly/debug/webdebugger.go
@@ -0,0 +1,146 @@
+// Copyright 2018 Adam Tauber
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package debug
+
+import (
+ "encoding/json"
+ "log"
+ "net/http"
+ "time"
+)
+
+// WebDebugger is a web based debugging frontend for colly
+type WebDebugger struct {
+ // Address is the address of the web server. It is 127.0.0.1:7676 by default.
+ Address string
+ initialized bool // guards against double registration in Init
+ // CurrentRequests maps in-flight request IDs to their info.
+ CurrentRequests map[uint32]requestInfo
+ // RequestLog accumulates info for every completed (or failed) request.
+ RequestLog []requestInfo
+}
+
+// requestInfo is the per-request record exposed by the status endpoint.
+type requestInfo struct {
+ URL string
+ Started time.Time
+ Duration time.Duration
+ ResponseStatus string
+ ID uint32
+ CollectorID uint32
+}
+
+// Init initializes the WebDebugger and starts the HTTP server in a
+// background goroutine. Handlers are registered on http.DefaultServeMux,
+// and any error from ListenAndServe is discarded.
+// Repeated calls are no-ops once initialized is set.
+func (w *WebDebugger) Init() error {
+ if w.initialized {
+ return nil
+ }
+ defer func() {
+ w.initialized = true
+ }()
+ if w.Address == "" {
+ w.Address = "127.0.0.1:7676"
+ }
+ w.RequestLog = make([]requestInfo, 0)
+ w.CurrentRequests = make(map[uint32]requestInfo)
+ http.HandleFunc("/", w.indexHandler)
+ http.HandleFunc("/status", w.statusHandler)
+ log.Println("Starting debug webserver on", w.Address)
+ go http.ListenAndServe(w.Address, nil)
+ return nil
+}
+
+// Event updates the debugger's status: "request" opens a record in
+// CurrentRequests; "response"/"error" closes it into RequestLog.
+// NOTE(review): CurrentRequests and RequestLog are mutated without any
+// locking here — verify events cannot arrive concurrently.
+func (w *WebDebugger) Event(e *Event) {
+ switch e.Type {
+ case "request":
+ w.CurrentRequests[e.RequestID] = requestInfo{
+ URL: e.Values["url"],
+ Started: time.Now(),
+ ID: e.RequestID,
+ CollectorID: e.CollectorID,
+ }
+ case "response", "error":
+ r := w.CurrentRequests[e.RequestID]
+ r.Duration = time.Since(r.Started)
+ r.ResponseStatus = e.Values["status"]
+ w.RequestLog = append(w.RequestLog, r)
+ delete(w.CurrentRequests, e.RequestID)
+ }
+}
+
+func (w *WebDebugger) indexHandler(wr http.ResponseWriter, r *http.Request) {
+ wr.Write([]byte(`
+
+
+
+
+
+`))
+}
+
+// statusHandler serves the debugger's exported state (CurrentRequests,
+// RequestLog, Address) as indented JSON. It panics if marshalling fails,
+// and the Write error is ignored.
+func (w *WebDebugger) statusHandler(wr http.ResponseWriter, r *http.Request) {
+ jsonData, err := json.MarshalIndent(w, "", " ")
+ if err != nil {
+ panic(err)
+ }
+ wr.Write(jsonData)
+}
diff --git a/vendor/github.com/gocolly/colly/htmlelement.go b/vendor/github.com/gocolly/colly/htmlelement.go
new file mode 100644
index 0000000..92484bd
--- /dev/null
+++ b/vendor/github.com/gocolly/colly/htmlelement.go
@@ -0,0 +1,120 @@
+// Copyright 2018 Adam Tauber
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package colly
+
+import (
+ "strings"
+
+ "github.com/PuerkitoBio/goquery"
+ "golang.org/x/net/html"
+)
+
+// HTMLElement is the representation of a HTML tag.
+type HTMLElement struct {
+ // Name is the name of the tag
+ Name string
+ // Text is the text content of the element's subtree (see
+ // NewHTMLElementFromSelectionNode).
+ Text string
+ // attributes are the raw html.Attribute pairs of the node, read by Attr.
+ attributes []html.Attribute
+ // Request is the request object of the element's HTML document
+ Request *Request
+ // Response is the Response object of the element's HTML document
+ Response *Response
+ // DOM is the goquery parsed DOM object of the page. DOM is relative
+ // to the current HTMLElement
+ DOM *goquery.Selection
+ // Index stores the position of the current element within all the elements matched by an OnHTML callback
+ Index int
+}
+
+// NewHTMLElementFromSelectionNode creates a HTMLElement from a goquery.Selection Node.
+func NewHTMLElementFromSelectionNode(resp *Response, s *goquery.Selection, n *html.Node, idx int) *HTMLElement {
+ return &HTMLElement{
+ Name: n.Data,
+ Request: resp.Request,
+ Response: resp,
+ Text: goquery.NewDocumentFromNode(n).Text(),
+ DOM: s,
+ Index: idx,
+ attributes: n.Attr,
+ }
+}
+
+// Attr returns the selected attribute of a HTMLElement or empty string
+// if no attribute found. Only the first attribute with a matching key
+// is considered.
+func (h *HTMLElement) Attr(k string) string {
+ for _, a := range h.attributes {
+ if a.Key == k {
+ return a.Val
+ }
+ }
+ return ""
+}
+
+// ChildText returns the concatenated and stripped text content of the matching
+// elements.
+func (h *HTMLElement) ChildText(goquerySelector string) string {
+ return strings.TrimSpace(h.DOM.Find(goquerySelector).Text())
+}
+
+// ChildAttr returns the stripped text content of the first matching
+// element's attribute.
+func (h *HTMLElement) ChildAttr(goquerySelector, attrName string) string {
+ if attr, ok := h.DOM.Find(goquerySelector).Attr(attrName); ok {
+ return strings.TrimSpace(attr)
+ }
+ return ""
+}
+
+// ChildAttrs returns the stripped text content of all the matching
+// element's attributes. Elements lacking the attribute are skipped.
+func (h *HTMLElement) ChildAttrs(goquerySelector, attrName string) []string {
+ var res []string
+ h.DOM.Find(goquerySelector).Each(func(_ int, s *goquery.Selection) {
+ if attr, ok := s.Attr(attrName); ok {
+ res = append(res, strings.TrimSpace(attr))
+ }
+ })
+ return res
+}
+
+// ForEach iterates over the elements matched by the first argument
+// and calls the callback function on every HTMLElement match.
+// The callback receives a running match index i, not the per-selection index.
+func (h *HTMLElement) ForEach(goquerySelector string, callback func(int, *HTMLElement)) {
+ i := 0
+ h.DOM.Find(goquerySelector).Each(func(_ int, s *goquery.Selection) {
+ for _, n := range s.Nodes {
+ callback(i, NewHTMLElementFromSelectionNode(h.Response, s, n, i))
+ i++
+ }
+ })
+}
+
+// ForEachWithBreak iterates over the elements matched by the first argument
+// and calls the callback function on every HTMLElement match.
+// It is identical to ForEach except that it is possible to break
+// out of the loop by returning false in the callback function. It returns the
+// current Selection object.
+func (h *HTMLElement) ForEachWithBreak(goquerySelector string, callback func(int, *HTMLElement) bool) {
+ i := 0
+ h.DOM.Find(goquerySelector).EachWithBreak(func(_ int, s *goquery.Selection) bool {
+ for _, n := range s.Nodes {
+ // callback true -> advance to the next match; false -> stop
+ // the whole iteration via EachWithBreak's false return.
+ if callback(i, NewHTMLElementFromSelectionNode(h.Response, s, n, i)) {
+ i++
+ return true
+ }
+ }
+ return false
+ })
+}
diff --git a/vendor/github.com/gocolly/colly/http_backend.go b/vendor/github.com/gocolly/colly/http_backend.go
new file mode 100644
index 0000000..5c3c216
--- /dev/null
+++ b/vendor/github.com/gocolly/colly/http_backend.go
@@ -0,0 +1,227 @@
+// Copyright 2018 Adam Tauber
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package colly
+
+import (
+ "crypto/sha1"
+ "encoding/gob"
+ "encoding/hex"
+ "io"
+ "io/ioutil"
+ "math/rand"
+ "net/http"
+ "os"
+ "path"
+ "regexp"
+ "sync"
+ "time"
+
+ "compress/gzip"
+
+ "github.com/gobwas/glob"
+)
+
+// httpBackend performs the actual HTTP traffic for a Collector, applying
+// per-domain LimitRules and optional on-disk response caching.
+type httpBackend struct {
+ LimitRules []*LimitRule
+ Client *http.Client
+ lock *sync.RWMutex // guards LimitRules
+}
+
+// LimitRule provides connection restrictions for domains.
+// Both DomainRegexp and DomainGlob can be used to specify
+// the included domains patterns, but at least one is required.
+// There can be two kind of limitations:
+// - Parallelism: Set limit for the number of concurrent requests to matching domains
+// - Delay: Wait specified amount of time between requests (parallelism is 1 in this case)
+type LimitRule struct {
+ // DomainRegexp is a regular expression to match against domains
+ DomainRegexp string
+ // DomainGlob is a glob pattern to match against domains
+ DomainGlob string
+ // Delay is the duration to wait before creating a new request to the matching domains
+ Delay time.Duration
+ // RandomDelay is the extra randomized duration to wait added to Delay before creating a new request
+ RandomDelay time.Duration
+ // Parallelism is the number of the maximum allowed concurrent requests of the matching domains
+ Parallelism int
+ waitChan chan bool // semaphore bounding concurrent requests
+ compiledRegexp *regexp.Regexp
+ compiledGlob glob.Glob
+}
+
+// Init initializes the private members of LimitRule: it sizes the
+// concurrency semaphore (capacity 1 unless Parallelism > 1) and compiles
+// the regexp/glob patterns. Returns ErrNoPattern if neither is set.
+func (r *LimitRule) Init() error {
+ waitChanSize := 1
+ if r.Parallelism > 1 {
+ waitChanSize = r.Parallelism
+ }
+ r.waitChan = make(chan bool, waitChanSize)
+ hasPattern := false
+ if r.DomainRegexp != "" {
+ c, err := regexp.Compile(r.DomainRegexp)
+ if err != nil {
+ return err
+ }
+ r.compiledRegexp = c
+ hasPattern = true
+ }
+ if r.DomainGlob != "" {
+ c, err := glob.Compile(r.DomainGlob)
+ if err != nil {
+ return err
+ }
+ r.compiledGlob = c
+ hasPattern = true
+ }
+ if !hasPattern {
+ return ErrNoPattern
+ }
+ return nil
+}
+
+// Init creates the backend's http.Client with the supplied cookie jar and
+// a fixed 10 second timeout, and seeds math/rand for RandomDelay jitter.
+func (h *httpBackend) Init(jar http.CookieJar) {
+ rand.Seed(time.Now().UnixNano())
+ h.Client = &http.Client{
+ Jar: jar,
+ Timeout: 10 * time.Second,
+ }
+ h.lock = &sync.RWMutex{}
+}
+
+// Match checks that the domain parameter triggers the rule
+// (either the compiled regexp or the compiled glob may match).
+func (r *LimitRule) Match(domain string) bool {
+ match := false
+ if r.compiledRegexp != nil && r.compiledRegexp.MatchString(domain) {
+ match = true
+ }
+ if r.compiledGlob != nil && r.compiledGlob.Match(domain) {
+ match = true
+ }
+ return match
+}
+
+// GetMatchingRule returns the first LimitRule matching domain, or nil.
+func (h *httpBackend) GetMatchingRule(domain string) *LimitRule {
+ if h.LimitRules == nil {
+ return nil
+ }
+ h.lock.RLock()
+ defer h.lock.RUnlock()
+ for _, r := range h.LimitRules {
+ if r.Match(domain) {
+ return r
+ }
+ }
+ return nil
+}
+
+// Cache wraps Do with a filesystem cache keyed by the SHA1 of the URL.
+// Only GET requests are cached; responses with status >= 500 are neither
+// served from nor written to the cache. Cache files are written to a
+// temporary "~" file and renamed into place to avoid partial entries.
+func (h *httpBackend) Cache(request *http.Request, bodySize int, cacheDir string) (*Response, error) {
+ if cacheDir == "" || request.Method != "GET" {
+ return h.Do(request, bodySize)
+ }
+ sum := sha1.Sum([]byte(request.URL.String()))
+ hash := hex.EncodeToString(sum[:])
+ dir := path.Join(cacheDir, hash[:2])
+ filename := path.Join(dir, hash)
+ if file, err := os.Open(filename); err == nil {
+ resp := new(Response)
+ err := gob.NewDecoder(file).Decode(resp)
+ file.Close()
+ if resp.StatusCode < 500 {
+ return resp, err
+ }
+ }
+ resp, err := h.Do(request, bodySize)
+ if err != nil || resp.StatusCode >= 500 {
+ return resp, err
+ }
+ if _, err := os.Stat(dir); err != nil {
+ if err := os.MkdirAll(dir, 0750); err != nil {
+ return resp, err
+ }
+ }
+ file, err := os.Create(filename + "~")
+ if err != nil {
+ return resp, err
+ }
+ if err := gob.NewEncoder(file).Encode(resp); err != nil {
+ file.Close()
+ return resp, err
+ }
+ file.Close()
+ return resp, os.Rename(filename+"~", filename)
+}
+
+// Do executes the request, honoring any matching LimitRule: the waitChan
+// send bounds parallelism, and the deferred sleep applies Delay (plus
+// random jitter) AFTER the request completes, before releasing the slot.
+// The response body is optionally truncated to bodySize bytes and
+// gzip-decoded when the server did not already decompress it.
+func (h *httpBackend) Do(request *http.Request, bodySize int) (*Response, error) {
+ r := h.GetMatchingRule(request.URL.Host)
+ if r != nil {
+ r.waitChan <- true
+ defer func(r *LimitRule) {
+ randomDelay := time.Duration(0)
+ if r.RandomDelay != 0 {
+ randomDelay = time.Duration(rand.Int63n(int64(r.RandomDelay)))
+ }
+ time.Sleep(r.Delay + randomDelay)
+ <-r.waitChan
+ }(r)
+ }
+
+ res, err := h.Client.Do(request)
+ if err != nil {
+ return nil, err
+ }
+ if res.Request != nil {
+ // Reflect redirects back into the caller's request value.
+ *request = *res.Request
+ }
+
+ var bodyReader io.Reader = res.Body
+ if bodySize > 0 {
+ bodyReader = io.LimitReader(bodyReader, int64(bodySize))
+ }
+ if !res.Uncompressed && res.Header.Get("Content-Encoding") == "gzip" {
+ bodyReader, err = gzip.NewReader(bodyReader)
+ if err != nil {
+ return nil, err
+ }
+ }
+ body, err := ioutil.ReadAll(bodyReader)
+ defer res.Body.Close()
+ if err != nil {
+ return nil, err
+ }
+ return &Response{
+ StatusCode: res.StatusCode,
+ Body: body,
+ Headers: &res.Header,
+ }, nil
+}
+
+// Limit registers a single LimitRule and initializes it.
+func (h *httpBackend) Limit(rule *LimitRule) error {
+ h.lock.Lock()
+ if h.LimitRules == nil {
+ h.LimitRules = make([]*LimitRule, 0, 8)
+ }
+ h.LimitRules = append(h.LimitRules, rule)
+ h.lock.Unlock()
+ return rule.Init()
+}
+
+// Limits registers multiple LimitRules, stopping at the first error.
+func (h *httpBackend) Limits(rules []*LimitRule) error {
+ for _, r := range rules {
+ if err := h.Limit(r); err != nil {
+ return err
+ }
+ }
+ return nil
+}
diff --git a/vendor/github.com/gocolly/colly/request.go b/vendor/github.com/gocolly/colly/request.go
new file mode 100644
index 0000000..4b94cd2
--- /dev/null
+++ b/vendor/github.com/gocolly/colly/request.go
@@ -0,0 +1,180 @@
+// Copyright 2018 Adam Tauber
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package colly
+
+import (
+ "bytes"
+ "encoding/json"
+ "io"
+ "io/ioutil"
+ "net/http"
+ "net/url"
+ "strings"
+ "sync/atomic"
+)
+
+// Request is the representation of a HTTP request made by a Collector
+type Request struct {
+ // URL is the parsed URL of the HTTP request
+ URL *url.URL
+ // Headers contains the Request's HTTP headers
+ Headers *http.Header
+ // Ctx is a context between a Request and a Response
+ Ctx *Context
+ // Depth is the number of the parents of the request
+ Depth int
+ // Method is the HTTP method of the request
+ Method string
+ // Body is the request body which is used on POST/PUT requests
+ Body io.Reader
+ // ResponseCharacterencoding is the character encoding of the response body.
+ // Leave it blank to allow automatic character encoding of the response body.
+ // It is empty by default and it can be set in OnRequest callback.
+ ResponseCharacterEncoding string
+ // ID is the Unique identifier of the request
+ ID uint32
+ collector *Collector
+ abort bool // set by Abort; checked before the request is sent
+ baseURL *url.URL // overrides URL as the base for AbsoluteURL when set
+ // ProxyURL is the proxy address that handles the request
+ ProxyURL string
+}
+
+// serializableRequest is the JSON shape emitted by Request.Marshal.
+type serializableRequest struct {
+ URL string
+ Method string
+ Body []byte
+ ID uint32
+ Ctx map[string]interface{}
+ Headers http.Header
+}
+
+// New creates a new request with the context of the original request.
+// The new request shares Ctx and collector but gets fresh Headers and a
+// newly allocated request ID.
+func (r *Request) New(method, URL string, body io.Reader) (*Request, error) {
+ u, err := url.Parse(URL)
+ if err != nil {
+ return nil, err
+ }
+ return &Request{
+ Method: method,
+ URL: u,
+ Body: body,
+ Ctx: r.Ctx,
+ Headers: &http.Header{},
+ ID: atomic.AddUint32(&r.collector.requestCount, 1),
+ collector: r.collector,
+ }, nil
+}
+
+// Abort cancels the HTTP request when called in an OnRequest callback
+func (r *Request) Abort() {
+ r.abort = true
+}
+
+// AbsoluteURL returns with the resolved absolute URL of an URL chunk.
+// AbsoluteURL returns empty string if the URL chunk is a fragment or
+// could not be parsed. Resolution uses baseURL (e.g. from a <base> tag)
+// when set, otherwise the request URL; fragments are always stripped.
+func (r *Request) AbsoluteURL(u string) string {
+ if strings.HasPrefix(u, "#") {
+ return ""
+ }
+ var base *url.URL
+ if r.baseURL != nil {
+ base = r.baseURL
+ } else {
+ base = r.URL
+ }
+ absURL, err := base.Parse(u)
+ if err != nil {
+ return ""
+ }
+ absURL.Fragment = ""
+ // NOTE(review): Parse resolves protocol-relative URLs against the
+ // base scheme, so Scheme == "//" looks unreachable — confirm intent.
+ if absURL.Scheme == "//" {
+ absURL.Scheme = r.URL.Scheme
+ }
+ return absURL.String()
+}
+
+// Visit continues Collector's collecting job by creating a
+// request and preserves the Context of the previous request.
+// Visit also calls the previously provided callbacks
+func (r *Request) Visit(URL string) error {
+ return r.collector.scrape(r.AbsoluteURL(URL), "GET", r.Depth+1, nil, r.Ctx, nil, true)
+}
+
+// Post continues a collector job by creating a POST request and preserves the Context
+// of the previous request.
+// Post also calls the previously provided callbacks
+func (r *Request) Post(URL string, requestData map[string]string) error {
+ return r.collector.scrape(r.AbsoluteURL(URL), "POST", r.Depth+1, createFormReader(requestData), r.Ctx, nil, true)
+}
+
+// PostRaw starts a collector job by creating a POST request with raw binary data.
+// PostRaw preserves the Context of the previous request
+// and calls the previously provided callbacks
+func (r *Request) PostRaw(URL string, requestData []byte) error {
+ return r.collector.scrape(r.AbsoluteURL(URL), "POST", r.Depth+1, bytes.NewReader(requestData), r.Ctx, nil, true)
+}
+
+// PostMultipart starts a collector job by creating a Multipart POST request
+// with raw binary data. PostMultipart also calls the previously provided.
+// callbacks
+func (r *Request) PostMultipart(URL string, requestData map[string][]byte) error {
+ boundary := randomBoundary()
+ hdr := http.Header{}
+ hdr.Set("Content-Type", "multipart/form-data; boundary="+boundary)
+ hdr.Set("User-Agent", r.collector.UserAgent)
+ return r.collector.scrape(r.AbsoluteURL(URL), "POST", r.Depth+1, createMultipartReader(boundary, requestData), r.Ctx, hdr, true)
+}
+
+// Retry submits HTTP request again with the same parameters
+// (revisit checking is disabled so the same URL can be fetched again).
+func (r *Request) Retry() error {
+ return r.collector.scrape(r.URL.String(), r.Method, r.Depth, r.Body, r.Ctx, *r.Headers, false)
+}
+
+// Do submits the request, honoring the collector's AllowURLRevisit setting.
+func (r *Request) Do() error {
+ return r.collector.scrape(r.URL.String(), r.Method, r.Depth, r.Body, r.Ctx, *r.Headers, !r.collector.AllowURLRevisit)
+}
+
+// Marshal serializes the Request to JSON.
+// NOTE(review): reading r.Body via ioutil.ReadAll drains the reader, so
+// the request cannot be re-submitted with its body after Marshal.
+func (r *Request) Marshal() ([]byte, error) {
+ ctx := make(map[string]interface{})
+ if r.Ctx != nil {
+ r.Ctx.ForEach(func(k string, v interface{}) interface{} {
+ ctx[k] = v
+ return nil
+ })
+ }
+ var err error
+ var body []byte
+ if r.Body != nil {
+ body, err = ioutil.ReadAll(r.Body)
+ if err != nil {
+ return nil, err
+ }
+ }
+ sr := &serializableRequest{
+ URL: r.URL.String(),
+ Method: r.Method,
+ Body: body,
+ ID: r.ID,
+ Ctx: ctx,
+ }
+ if r.Headers != nil {
+ sr.Headers = *r.Headers
+ }
+ return json.Marshal(sr)
+}
diff --git a/vendor/github.com/gocolly/colly/response.go b/vendor/github.com/gocolly/colly/response.go
new file mode 100644
index 0000000..29ba6ae
--- /dev/null
+++ b/vendor/github.com/gocolly/colly/response.go
@@ -0,0 +1,99 @@
+// Copyright 2018 Adam Tauber
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package colly
+
+import (
+ "bytes"
+ "fmt"
+ "io/ioutil"
+ "mime"
+ "net/http"
+ "strings"
+
+ "github.com/saintfish/chardet"
+ "golang.org/x/net/html/charset"
+)
+
+// Response is the representation of a HTTP response made by a Collector
+type Response struct {
+ // StatusCode is the status code of the Response
+ StatusCode int
+ // Body is the content of the Response
+ Body []byte
+ // Ctx is a context between a Request and a Response
+ Ctx *Context
+ // Request is the Request object of the response
+ Request *Request
+ // Headers contains the Response's HTTP headers
+ Headers *http.Header
+}
+
+// Save writes response body to disk with 0644 permissions.
+func (r *Response) Save(fileName string) error {
+ return ioutil.WriteFile(fileName, r.Body, 0644)
+}
+
+// FileName returns the sanitized file name parsed from "Content-Disposition"
+// header or from URL. When the URL carries a query string, it is folded
+// into the name so distinct queries yield distinct files.
+func (r *Response) FileName() string {
+ _, params, err := mime.ParseMediaType(r.Headers.Get("Content-Disposition"))
+ if fName, ok := params["filename"]; ok && err == nil {
+ return SanitizeFileName(fName)
+ }
+ if r.Request.URL.RawQuery != "" {
+ return SanitizeFileName(fmt.Sprintf("%s_%s", r.Request.URL.Path, r.Request.URL.RawQuery))
+ }
+ return SanitizeFileName(strings.TrimPrefix(r.Request.URL.Path, "/"))
+}
+
+// fixCharset re-encodes r.Body to UTF-8. When defaultEncoding is given it
+// is used unconditionally; otherwise the charset comes from the
+// Content-Type header, falling back to chardet detection when allowed.
+// Bodies already declared utf-8/utf8 are left untouched.
+func (r *Response) fixCharset(detectCharset bool, defaultEncoding string) error {
+ if defaultEncoding != "" {
+ tmpBody, err := encodeBytes(r.Body, "text/plain; charset="+defaultEncoding)
+ if err != nil {
+ return err
+ }
+ r.Body = tmpBody
+ return nil
+ }
+ contentType := strings.ToLower(r.Headers.Get("Content-Type"))
+ if !strings.Contains(contentType, "charset") {
+ if !detectCharset {
+ return nil
+ }
+ d := chardet.NewTextDetector()
+ // Note: this r shadows the method receiver for the rest of the if
+ // block; it is the chardet detection result.
+ r, err := d.DetectBest(r.Body)
+ if err != nil {
+ return err
+ }
+ contentType = "text/plain; charset=" + r.Charset
+ }
+ if strings.Contains(contentType, "utf-8") || strings.Contains(contentType, "utf8") {
+ return nil
+ }
+ tmpBody, err := encodeBytes(r.Body, contentType)
+ if err != nil {
+ return err
+ }
+ r.Body = tmpBody
+ return nil
+}
+
+// encodeBytes converts b to UTF-8 according to the charset named in
+// contentType, using x/net/html/charset for the conversion.
+func encodeBytes(b []byte, contentType string) ([]byte, error) {
+ r, err := charset.NewReader(bytes.NewReader(b), contentType)
+ if err != nil {
+ return nil, err
+ }
+ return ioutil.ReadAll(r)
+}
diff --git a/vendor/github.com/gocolly/colly/storage/storage.go b/vendor/github.com/gocolly/colly/storage/storage.go
new file mode 100644
index 0000000..fcb0c0c
--- /dev/null
+++ b/vendor/github.com/gocolly/colly/storage/storage.go
@@ -0,0 +1,128 @@
+// Copyright 2018 Adam Tauber
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package storage
+
+import (
+ "net/http"
+ "net/http/cookiejar"
+ "net/url"
+ "strings"
+ "sync"
+)
+
+// Storage is an interface which handles Collector's internal data,
+// like visited urls and cookies.
+// The default Storage of the Collector is the InMemoryStorage.
+// Collector's storage can be changed by calling Collector.SetStorage()
+// function.
+type Storage interface {
+ // Init initializes the storage
+ Init() error
+ // Visited receives and stores a request ID that is visited by the Collector
+ Visited(requestID uint64) error
+ // IsVisited returns true if the request was visited before IsVisited
+ // is called
+ IsVisited(requestID uint64) (bool, error)
+ // Cookies retrieves stored cookies for a given host
+ Cookies(u *url.URL) string
+ // SetCookies stores cookies for a given host
+ SetCookies(u *url.URL, cookies string)
+}
+
+// InMemoryStorage is the default storage backend of colly.
+// InMemoryStorage keeps cookies and visited urls in memory
+// without persisting data on the disk.
+type InMemoryStorage struct {
+ visitedURLs map[uint64]bool
+ lock *sync.RWMutex
+ jar *cookiejar.Jar
+}
+
+// Init initializes InMemoryStorage
+func (s *InMemoryStorage) Init() error {
+ if s.visitedURLs == nil {
+ s.visitedURLs = make(map[uint64]bool)
+ }
+ if s.lock == nil {
+ s.lock = &sync.RWMutex{}
+ }
+ if s.jar == nil {
+ var err error
+ s.jar, err = cookiejar.New(nil)
+ return err
+ }
+ return nil
+}
+
+// Visited implements Storage.Visited()
+func (s *InMemoryStorage) Visited(requestID uint64) error {
+ s.lock.Lock()
+ s.visitedURLs[requestID] = true
+ s.lock.Unlock()
+ return nil
+}
+
+// IsVisited implements Storage.IsVisited()
+func (s *InMemoryStorage) IsVisited(requestID uint64) (bool, error) {
+ s.lock.RLock()
+ visited := s.visitedURLs[requestID]
+ s.lock.RUnlock()
+ return visited, nil
+}
+
+// Cookies implements Storage.Cookies()
+func (s *InMemoryStorage) Cookies(u *url.URL) string {
+ return StringifyCookies(s.jar.Cookies(u))
+}
+
+// SetCookies implements Storage.SetCookies()
+func (s *InMemoryStorage) SetCookies(u *url.URL, cookies string) {
+ s.jar.SetCookies(u, UnstringifyCookies(cookies))
+}
+
+// Close is a no-op; InMemoryStorage holds no resources that need cleanup.
+func (s *InMemoryStorage) Close() error {
+ return nil
+}
+
+// StringifyCookies serializes list of http.Cookies to string
+func StringifyCookies(cookies []*http.Cookie) string {
+ // Stringify cookies.
+ cs := make([]string, len(cookies))
+ for i, c := range cookies {
+ cs[i] = c.String()
+ }
+ return strings.Join(cs, "\n")
+}
+
+// UnstringifyCookies deserializes a cookie string to http.Cookies
+func UnstringifyCookies(s string) []*http.Cookie {
+ h := http.Header{}
+ for _, c := range strings.Split(s, "\n") {
+ h.Add("Set-Cookie", c)
+ }
+ r := http.Response{Header: h}
+ return r.Cookies()
+}
+
+// ContainsCookie checks if a cookie name is represented in cookies
+func ContainsCookie(cookies []*http.Cookie, name string) bool {
+ for _, c := range cookies {
+ if c.Name == name {
+ return true
+ }
+ }
+ return false
+}
diff --git a/vendor/github.com/gocolly/colly/unmarshal.go b/vendor/github.com/gocolly/colly/unmarshal.go
new file mode 100644
index 0000000..c4a66c4
--- /dev/null
+++ b/vendor/github.com/gocolly/colly/unmarshal.go
@@ -0,0 +1,171 @@
+// Copyright 2018 Adam Tauber
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package colly
+
+import (
+ "errors"
+ "reflect"
+ "strings"
+
+ "github.com/PuerkitoBio/goquery"
+)
+
+// Unmarshal is a shorthand for colly.UnmarshalHTML
+func (h *HTMLElement) Unmarshal(v interface{}) error {
+ return UnmarshalHTML(v, h.DOM)
+}
+
+// UnmarshalHTML declaratively extracts text or attributes to a struct from
+// HTML response using struct tags composed of css selectors.
+// Allowed struct tags:
+// - "selector" (required): CSS (goquery) selector of the desired data
+// - "attr" (optional): Selects the matching element's attribute's value.
+// Leave it blank or omit to get the text of the element.
+//
+// Example struct declaration:
+//
+// type Nested struct {
+// String string `selector:"div > p"`
+// Classes []string `selector:"li" attr:"class"`
+// Struct *Nested `selector:"div > div"`
+// }
+//
+// Supported types: struct, *struct, string, []string
+func UnmarshalHTML(v interface{}, s *goquery.Selection) error {
+ rv := reflect.ValueOf(v)
+
+ if rv.Kind() != reflect.Ptr || rv.IsNil() {
+ return errors.New("Invalid type or nil-pointer")
+ }
+
+ sv := rv.Elem()
+ st := reflect.TypeOf(v).Elem()
+
+ for i := 0; i < sv.NumField(); i++ {
+ attrV := sv.Field(i)
+ if !attrV.CanAddr() || !attrV.CanSet() {
+ continue
+ }
+ if err := unmarshalAttr(s, attrV, st.Field(i)); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func unmarshalAttr(s *goquery.Selection, attrV reflect.Value, attrT reflect.StructField) error {
+ selector := attrT.Tag.Get("selector")
+ // A selector of "-" specifies that the field should be ignored.
+ if selector == "-" {
+ return nil
+ }
+ htmlAttr := attrT.Tag.Get("attr")
+ // TODO support more types
+ switch attrV.Kind() {
+ case reflect.Slice:
+ if err := unmarshalSlice(s, selector, htmlAttr, attrV); err != nil {
+ return err
+ }
+ case reflect.String:
+ val := getDOMValue(s.Find(selector), htmlAttr)
+ attrV.Set(reflect.Indirect(reflect.ValueOf(val)))
+ case reflect.Struct:
+ if err := unmarshalStruct(s, selector, attrV); err != nil {
+ return err
+ }
+ case reflect.Ptr:
+ if err := unmarshalPtr(s, selector, attrV); err != nil {
+ return err
+ }
+ default:
+ return errors.New("Invalid type: " + attrV.String())
+ }
+ return nil
+}
+
+func unmarshalStruct(s *goquery.Selection, selector string, attrV reflect.Value) error {
+ newS := s
+ if selector != "" {
+ newS = newS.Find(selector)
+ }
+ if newS.Nodes == nil {
+ return nil
+ }
+ v := reflect.New(attrV.Type())
+ err := UnmarshalHTML(v.Interface(), newS)
+ if err != nil {
+ return err
+ }
+ attrV.Set(reflect.Indirect(v))
+ return nil
+}
+
+func unmarshalPtr(s *goquery.Selection, selector string, attrV reflect.Value) error {
+ newS := s
+ if selector != "" {
+ newS = newS.Find(selector)
+ }
+ if newS.Nodes == nil {
+ return nil
+ }
+ e := attrV.Type().Elem()
+ if e.Kind() != reflect.Struct {
+ return errors.New("Invalid slice type")
+ }
+ v := reflect.New(e)
+ err := UnmarshalHTML(v.Interface(), newS)
+ if err != nil {
+ return err
+ }
+ attrV.Set(v)
+ return nil
+}
+
+func unmarshalSlice(s *goquery.Selection, selector, htmlAttr string, attrV reflect.Value) error {
+ if attrV.Pointer() == 0 {
+ v := reflect.MakeSlice(attrV.Type(), 0, 0)
+ attrV.Set(v)
+ }
+ switch attrV.Type().Elem().Kind() {
+ case reflect.String:
+ s.Find(selector).Each(func(_ int, s *goquery.Selection) {
+ val := getDOMValue(s, htmlAttr)
+ attrV.Set(reflect.Append(attrV, reflect.Indirect(reflect.ValueOf(val))))
+ })
+ case reflect.Ptr:
+ s.Find(selector).Each(func(_ int, innerSel *goquery.Selection) {
+ someVal := reflect.New(attrV.Type().Elem().Elem())
+ UnmarshalHTML(someVal.Interface(), innerSel)
+ attrV.Set(reflect.Append(attrV, someVal))
+ })
+ case reflect.Struct:
+ s.Find(selector).Each(func(_ int, innerSel *goquery.Selection) {
+ someVal := reflect.New(attrV.Type().Elem())
+ UnmarshalHTML(someVal.Interface(), innerSel)
+ attrV.Set(reflect.Append(attrV, reflect.Indirect(someVal)))
+ })
+ default:
+ return errors.New("Invalid slice type")
+ }
+ return nil
+}
+
+func getDOMValue(s *goquery.Selection, attr string) string {
+ if attr == "" {
+ return strings.TrimSpace(s.First().Text())
+ }
+ attrV, _ := s.Attr(attr)
+ return attrV
+}
diff --git a/vendor/github.com/gocolly/colly/unmarshal_test.go b/vendor/github.com/gocolly/colly/unmarshal_test.go
new file mode 100644
index 0000000..59ae58a
--- /dev/null
+++ b/vendor/github.com/gocolly/colly/unmarshal_test.go
@@ -0,0 +1,129 @@
+// Copyright 2018 Adam Tauber
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package colly
+
+import (
+ "bytes"
+ "testing"
+
+ "github.com/PuerkitoBio/goquery"
+)
+
+var basicTestData = []byte(`<div>
+<ul>
+<li class="x">list item 1</li>
+<li>list item 2</li>
+<li>3</li>
+</ul>
+</div>`)
+var nestedTestData = []byte(`<div>
+<p>a</p>
+<div>
+<p>b</p>
+<div>
+<p>c</p>
+</div>
+</div>
+</div>`)
+var pointerSliceTestData = []byte(`<ul>
+<li class="info">Information: <span>Info 1</span></li>
+<li class="info">Information: <span>Info 2</span></li>
+</ul>`)
+
+func TestBasicUnmarshal(t *testing.T) {
+ doc, _ := goquery.NewDocumentFromReader(bytes.NewBuffer(basicTestData))
+ e := &HTMLElement{
+ DOM: doc.First(),
+ }
+ s := struct {
+ String string `selector:"li:first-child" attr:"class"`
+ Items []string `selector:"li"`
+ Struct struct {
+ String string `selector:"li:last-child"`
+ }
+ }{}
+ if err := e.Unmarshal(&s); err != nil {
+ t.Error("Cannot unmarshal struct: " + err.Error())
+ }
+ if s.String != "x" {
+ t.Errorf(`Invalid data for String: %q, expected "x"`, s.String)
+ }
+ if s.Struct.String != "3" {
+ t.Errorf(`Invalid data for Struct.String: %q, expected "3"`, s.Struct.String)
+ }
+}
+
+func TestNestedUnmarshal(t *testing.T) {
+ doc, _ := goquery.NewDocumentFromReader(bytes.NewBuffer(nestedTestData))
+ e := &HTMLElement{
+ DOM: doc.First(),
+ }
+ type nested struct {
+ String string `selector:"div > p"`
+ Struct *nested `selector:"div > div"`
+ }
+ s := nested{}
+ if err := e.Unmarshal(&s); err != nil {
+ t.Error("Cannot unmarshal struct: " + err.Error())
+ }
+ if s.String != "a" {
+ t.Errorf(`Invalid data for String: %q, expected "a"`, s.String)
+ }
+ if s.Struct.String != "b" {
+ t.Errorf(`Invalid data for Struct.String: %q, expected "b"`, s.Struct.String)
+ }
+ if s.Struct.Struct.String != "c" {
+ t.Errorf(`Invalid data for Struct.Struct.String: %q, expected "c"`, s.Struct.Struct.String)
+ }
+}
+
+func TestPointerSliceUnmarshall(t *testing.T) {
+ type info struct {
+ Text string `selector:"span"`
+ }
+ type object struct {
+ Info []*info `selector:"li.info"`
+ }
+
+ doc, _ := goquery.NewDocumentFromReader(bytes.NewBuffer(pointerSliceTestData))
+ e := HTMLElement{DOM: doc.First()}
+ o := object{}
+ err := e.Unmarshal(&o)
+ if err != nil {
+ t.Fatalf("Failed to unmarshal page: %s\n", err.Error())
+ }
+
+ if len(o.Info) != 2 {
+ t.Errorf("Invalid length for Info: %d, expected 2", len(o.Info))
+ }
+ if o.Info[0].Text != "Info 1" {
+ t.Errorf("Invalid data for Info.[0].Text: %s, expected Info 1", o.Info[0].Text)
+ }
+ if o.Info[1].Text != "Info 2" {
+ t.Errorf("Invalid data for Info.[1].Text: %s, expected Info 2", o.Info[1].Text)
+ }
+
+}
+
+func TestStructSliceUnmarshall(t *testing.T) {
+ type info struct {
+ Text string `selector:"span"`
+ }
+ type object struct {
+ Info []info `selector:"li.info"`
+ }
+
+ doc, _ := goquery.NewDocumentFromReader(bytes.NewBuffer(pointerSliceTestData))
+ e := HTMLElement{DOM: doc.First()}
+ o := object{}
+ err := e.Unmarshal(&o)
+ if err != nil {
+ t.Fatalf("Failed to unmarshal page: %s\n", err.Error())
+ }
+
+ if len(o.Info) != 2 {
+ t.Errorf("Invalid length for Info: %d, expected 2", len(o.Info))
+ }
+ if o.Info[0].Text != "Info 1" {
+ t.Errorf("Invalid data for Info.[0].Text: %s, expected Info 1", o.Info[0].Text)
+ }
+ if o.Info[1].Text != "Info 2" {
+ t.Errorf("Invalid data for Info.[1].Text: %s, expected Info 2", o.Info[1].Text)
+ }
+
+}
diff --git a/vendor/github.com/gocolly/colly/xmlelement.go b/vendor/github.com/gocolly/colly/xmlelement.go
new file mode 100644
index 0000000..90b566a
--- /dev/null
+++ b/vendor/github.com/gocolly/colly/xmlelement.go
@@ -0,0 +1,170 @@
+// Copyright 2018 Adam Tauber
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package colly
+
+import (
+ "encoding/xml"
+ "strings"
+
+ "github.com/antchfx/htmlquery"
+ "github.com/antchfx/xmlquery"
+ "golang.org/x/net/html"
+)
+
+// XMLElement is the representation of a XML tag.
+type XMLElement struct {
+ // Name is the name of the tag
+ Name string
+ Text string
+ attributes interface{}
+ // Request is the request object of the element's HTML document
+ Request *Request
+ // Response is the Response object of the element's HTML document
+ Response *Response
+ // DOM is the DOM object of the page. DOM is relative
+ // to the current XMLElement and is either a html.Node or xmlquery.Node
+ // based on how the XMLElement was created.
+ DOM interface{}
+ isHTML bool
+}
+
+// NewXMLElementFromHTMLNode creates a XMLElement from a html.Node.
+func NewXMLElementFromHTMLNode(resp *Response, s *html.Node) *XMLElement {
+ return &XMLElement{
+ Name: s.Data,
+ Request: resp.Request,
+ Response: resp,
+ Text: htmlquery.InnerText(s),
+ DOM: s,
+ attributes: s.Attr,
+ isHTML: true,
+ }
+}
+
+// NewXMLElementFromXMLNode creates a XMLElement from a xmlquery.Node.
+func NewXMLElementFromXMLNode(resp *Response, s *xmlquery.Node) *XMLElement {
+ return &XMLElement{
+ Name: s.Data,
+ Request: resp.Request,
+ Response: resp,
+ Text: s.InnerText(),
+ DOM: s,
+ attributes: s.Attr,
+ isHTML: false,
+ }
+}
+
+// Attr returns the selected attribute of the XMLElement or an empty string
+// if no attribute found
+func (h *XMLElement) Attr(k string) string {
+ if h.isHTML {
+ for _, a := range h.attributes.([]html.Attribute) {
+ if a.Key == k {
+ return a.Val
+ }
+ }
+ } else {
+ for _, a := range h.attributes.([]xml.Attr) {
+ if a.Name.Local == k {
+ return a.Value
+ }
+ }
+ }
+ return ""
+}
+
+// ChildText returns the concatenated and stripped text content of the matching
+// elements.
+func (h *XMLElement) ChildText(xpathQuery string) string {
+ if h.isHTML {
+ child := htmlquery.FindOne(h.DOM.(*html.Node), xpathQuery)
+ if child == nil {
+ return ""
+ }
+ return strings.TrimSpace(htmlquery.InnerText(child))
+ }
+ child := xmlquery.FindOne(h.DOM.(*xmlquery.Node), xpathQuery)
+ if child == nil {
+ return ""
+ }
+ return strings.TrimSpace(child.InnerText())
+
+}
+
+// ChildAttr returns the stripped text content of the first matching
+// element's attribute.
+func (h *XMLElement) ChildAttr(xpathQuery, attrName string) string {
+ if h.isHTML {
+ child := htmlquery.FindOne(h.DOM.(*html.Node), xpathQuery)
+ if child != nil {
+ for _, attr := range child.Attr {
+ if attr.Key == attrName {
+ return strings.TrimSpace(attr.Val)
+ }
+ }
+ }
+ } else {
+ child := xmlquery.FindOne(h.DOM.(*xmlquery.Node), xpathQuery)
+ if child != nil {
+ for _, attr := range child.Attr {
+ if attr.Name.Local == attrName {
+ return strings.TrimSpace(attr.Value)
+ }
+ }
+ }
+ }
+
+ return ""
+}
+
+// ChildAttrs returns the stripped text content of all the matching
+// element's attributes.
+func (h *XMLElement) ChildAttrs(xpathQuery, attrName string) []string {
+ var res []string
+ if h.isHTML {
+ htmlquery.FindEach(h.DOM.(*html.Node), xpathQuery, func(i int, child *html.Node) {
+ for _, attr := range child.Attr {
+ if attr.Key == attrName {
+ res = append(res, strings.TrimSpace(attr.Val))
+ }
+ }
+ })
+ } else {
+ xmlquery.FindEach(h.DOM.(*xmlquery.Node), xpathQuery, func(i int, child *xmlquery.Node) {
+ for _, attr := range child.Attr {
+ if attr.Name.Local == attrName {
+ res = append(res, strings.TrimSpace(attr.Value))
+ }
+ }
+ })
+ }
+ return res
+}
+
+// ChildTexts returns an array of strings corresponding to child elements that match the xpath query.
+// Each item in the array is the stripped text content of the corresponding matching child element.
+func (h *XMLElement) ChildTexts(xpathQuery string) []string {
+ texts := make([]string, 0)
+ if h.isHTML {
+ htmlquery.FindEach(h.DOM.(*html.Node), xpathQuery, func(i int, child *html.Node) {
+ texts = append(texts, strings.TrimSpace(htmlquery.InnerText(child)))
+ })
+ } else {
+ xmlquery.FindEach(h.DOM.(*xmlquery.Node), xpathQuery, func(i int, child *xmlquery.Node) {
+ texts = append(texts, strings.TrimSpace(child.InnerText()))
+ })
+ }
+ return texts
+}
diff --git a/vendor/github.com/gocolly/colly/xmlelement_test.go b/vendor/github.com/gocolly/colly/xmlelement_test.go
new file mode 100644
index 0000000..ac7a1ae
--- /dev/null
+++ b/vendor/github.com/gocolly/colly/xmlelement_test.go
@@ -0,0 +1,123 @@
+// Copyright 2018 Adam Tauber
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package colly_test
+
+import (
+ "github.com/antchfx/htmlquery"
+ "github.com/gocolly/colly"
+ "reflect"
+ "strings"
+ "testing"
+)
+
+// Borrowed from http://infohost.nmt.edu/tcc/help/pubs/xhtml/example.html
+// Added attributes to the `<li>` tags for testing purposes
+const htmlPage = `
+
+
+
+ Your page title here
+
+
+