{"id":632,"date":"2020-06-18T12:21:04","date_gmt":"2020-06-18T04:21:04","guid":{"rendered":"http:\/\/www.guanhaobo.cn\/?p=632"},"modified":"2020-06-18T12:21:04","modified_gmt":"2020-06-18T04:21:04","slug":"%e5%a4%a7%e6%95%b0%e6%8d%ae%e5%ae%9e%e9%aa%8c%e4%ba%94-%e7%bb%bc%e5%90%88%e5%ae%9e%e9%aa%8c","status":"publish","type":"post","link":"https:\/\/www.guanhaobo.cn\/?p=632","title":{"rendered":"\u5927\u6570\u636e\u5b9e\u9a8c\u4e94 \u2014\u2014 \u7efc\u5408\u5b9e\u9a8c"},"content":{"rendered":"<h3>1 \u5b9e\u9a8c\u76ee\u7684<\/h3>\n<p>\u4e86\u89e3Kafka\u3001HDFS\u3001MapReduce\u5728Hadoop\u4f53\u7cfb\u7ed3\u6784\u4e2d\u7684\u89d2\u8272\uff0c\u5e76\u901a\u8fc7\u672c\u6b21\u7efc\u5408\u5b9e\u9a8c\u5bf9\u5927\u6570\u636e\u6280\u672f\u5728\u5b9e\u9645\u5e94\u7528\u4e2d\u7684\u4e3b\u8981\u6d41\u7a0b\u6709\u521d\u6b65\u7684\u8ba4\u8bc6\uff1b<\/p>\n<h3>2 \u5b9e\u9a8c\u73af\u5883<\/h3>\n<p>\u5b9e\u9a8c\u5e73\u53f0\uff1a\u57fa\u4e8e\u5b9e\u9a8c\u4e00\u642d\u5efa\u7684\u865a\u62df\u673aHadoop\u5927\u6570\u636e\u5b9e\u9a8c\u5e73\u53f0\u4e0a\u7684Kafka\u3001HDFS\u3001MapReduce\u96c6\u7fa4\uff1b<br \/>\n\u7f16\u7a0b\u8bed\u8a00\uff1aJAVA\uff08\u63a8\u8350\u4f7f\u7528\uff09\u3001Python\u3001C++\u7b49\uff1b<\/p>\n<h3>3 
\u5b9e\u9a8c\u5185\u5bb9<\/h3>\n<ol>\n<li>\u7f16\u7a0b\u5b9e\u73b0Kafka\u751f\u4ea7\u8005\uff0c\u6a21\u62df\u6570\u636e\u91c7\u96c6\u7684\u8fc7\u7a0b\uff0c\u5411\u6307\u5b9atopic\u53d1\u9001\u6570\u636e\u3002<\/li>\n<li>\u7f16\u5199MapReduce\u7a0b\u5e8f\uff0c\u6d88\u8d39\u4e0a\u8ff0topic\u4e2d\u7684\u6570\u636e\uff0c\u5e76\u5bf9\u6570\u636e\u8fdb\u884c\u4e00\u5b9a\u7684\u5904\u7406\uff0c\u5982\u6c42\u548c\u3001\u6392\u5e8f\u7b49\u3002\uff08\u53ef\u9009\u7528Spark\u4ee3\u66ffMapReduce\uff09<\/li>\n<li>MapReduce\u7a0b\u5e8f\u5c06\u5904\u7406\u7ed3\u679c\u5b58\u50a8\u5230HDFS\u6587\u4ef6\u7cfb\u7edf\u4e2d\u3002\uff08\u53ef\u9009\u7528Hbase\u6216Hive\u4ee3\u66ffHDFS\uff0c\u9700\u8981\u5206\u522b\u8bbe\u8ba1Hbase\u8868\u548cHive\u8868\uff09<\/li>\n<li>\u5bf9\u4ee5\u4e0a\u5b9e\u9a8c\u5185\u5bb9\u7f16\u5199\u5b9e\u9a8c\u62a5\u544a\uff0c\u5e76\u63d0\u4ea4\u5b9e\u9a8c\u76f8\u5173\u4ee3\u7801\u3002<\/li>\n<\/ol>\n<h3>4 \u5b9e\u9a8c\u6570\u636e<\/h3>\n<p>\u672c\u6b21\u5b9e\u9a8c\u4f7f\u7528\u6570\u636e\u96c6\u540cKafka\u6570\u636e\u91c7\u96c6\u5b9e\u9a8c\u4e2d\u7684\u6570\u636e\u96c6\u3002<\/p>\n<h3>5 \u51c6\u5907\u5de5\u4f5c<\/h3>\n<pre><code class=\"language-cpp line-numbers\">\/\/ \u91cd\u542f\u65f6\u95f4\u540c\u6b65\u670d\u52a1\uff08cluster1 \u4e0a\uff09\nservice ntpd restart\n\/\/ \u540c\u6b65\u65f6\u95f4\uff08cluster2 \u548c cluster3\uff09\nntpdate cluster1\n\/\/ \u5207\u6362\u5230\u7528\u6237hadoop\uff08\u4e09\u53f0\uff09\nsu hadoop\n\/\/\u542f\u52a8zookeeper\uff08\u4e09\u53f0\uff09\nzkServer.sh start\n\/\/\u542f\u52a8kafka\uff08\u4e09\u53f0\uff09\nkafka-server-start.sh \/usr\/local\/kafka_2.10-0.8.2.1\/config\/server.properties &amp;\n\/\/ \u542f\u52a8 HDFS\uff08cluster1 \u4e0a\uff09\nstart-dfs.sh\n\/\/ \u542f\u52a8 YARN\uff08cluster1 \u4e0a\uff09\nstart-yarn.sh\n<\/code><\/pre>\n<h3>6 
Kafka\u751f\u4ea7\u8005\u53d1\u9001\u6570\u636e<\/h3>\n<p>\u7f16\u7a0b\u5b9e\u73b0Kafka\u751f\u4ea7\u8005\uff0c\u6a21\u62df\u6570\u636e\u91c7\u96c6\u7684\u8fc7\u7a0b\uff0c\u5411\u6307\u5b9atopic\u53d1\u9001\u6570\u636e\u3002<br \/>\n\u7f16\u5199\u4ee3\u7801<code>ghbProducer.java<\/code><\/p>\n<pre><code class=\"language-java line-numbers\">import java.io.BufferedReader;\nimport java.io.File;\nimport java.io.FileReader;\nimport java.io.IOException;\nimport java.util.Properties;\nimport java.util.Scanner;\nimport kafka.javaapi.producer.Producer;\nimport kafka.producer.KeyedMessage;\nimport kafka.producer.ProducerConfig;\n\npublic class ghbProducer {\n\n    public static void main(String[] args) {\n        Scanner in = new Scanner(System.in);\n        Properties props = new Properties();\n        props.put(\"serializer.class\", \"kafka.serializer.StringEncoder\");\n        props.put(\"metadata.broker.list\", \"cluster1:9092\");\n        Producer&lt;Integer, String&gt; producer = new Producer&lt;Integer, String&gt;(new ProducerConfig(props));\n        String topic;\n        System.out.print(\"\u8bf7\u8f93\u5165topic\u540d\u79f0\uff1a\");\n        topic = in.next();\n        File file = new File(\"kafka\u91c7\u96c6\u6570\u636e\u5b9e\u9a8c.txt\");\n        BufferedReader reader = null;\n        System.out.print(\"\u8bf7\u8f93\u5165\u53d1\u9001\u6570\u636e\u884c\u6570\uff1a\");\n        int num = in.nextInt();\n        try {\n            reader = new BufferedReader(new FileReader(file));\n            String tempString = null;\n            int line = 1;\n            while ((tempString = reader.readLine()) != null) {\n                producer.send(new KeyedMessage&lt;Integer, String&gt;(topic, tempString));\n                System.out.println(\"\u6210\u529f\u53d1\u9001\u7b2c \" + line + \" \u884c\u6570\u636e...\");\n                if (line == num)\n                    break;\n                line++;\n            }\n            reader.close();\n        } catch (Exception e) 
{\n            e.printStackTrace();\n        } finally {\n            if (reader != null) {\n                try {\n                    reader.close();\n                } catch (IOException e1) {\n                }\n            }\n        }\n        producer.close();\n    }\n}\n<\/code><\/pre>\n<p>\u6253\u5f00WinSCP\uff0c\u5c06<code>ghbProducer.java<\/code>\u548c\u5b9e\u9a8c\u6570\u636e\u4e0a\u4f20\u81f3\u865a\u62df\u673acluster1\u7684\/home\/hadoop\u8def\u5f84\u4e0b\u3002<\/p>\n<p><img decoding=\"async\" src=\"http:\/\/www.guanhaobo.cn\/wp-content\/uploads\/2020\/06\/1001-1.jpg\" alt=\"\" \/><\/p>\n<p>\u5728cluster1\u4e0a\u6267\u884c<\/p>\n<pre><code class=\"language-php line-numbers\">cd \/home\/hadoop\n\/\/ \u7f16\u8bd1\njavac -cp \/usr\/local\/kafka_2.10-0.8.2.1\/libs\/*: ghbProducer.java\n\/\/ \u8fd0\u884c\njava -cp \/usr\/local\/kafka_2.10-0.8.2.1\/libs\/*: ghbProducer\n<\/code><\/pre>\n<p><img decoding=\"async\" src=\"http:\/\/www.guanhaobo.cn\/wp-content\/uploads\/2020\/06\/1002-1.jpg\" alt=\"\" \/><\/p>\n<h3>7 \u6d88\u8d39\u5e76\u5904\u7406\u6570\u636e\uff0c\u5b58\u50a8\u5230HDFS\u6587\u4ef6\u7cfb\u7edf\u4e2d<\/h3>\n<p>\u4e3b\u8981\u601d\u8def\uff1a\u5148\u4eceKafka\u83b7\u53d6\u6570\u636e\uff0c\u4fdd\u5b58\u5230\u76ee\u5f55<code>ghb_lab5_input<\/code>\u4e2d\uff1b\u7136\u540e\u4f7f\u7528MapReduce\u5bf9\u6570\u636e\u8fdb\u884c\u6392\u5e8f\uff0c\u8f93\u51fa\u5230\u76ee\u5f55<code>ghb_lab5_output<\/code>\uff1b\u6700\u540e\u5c06\u5904\u7406\u540e\u7684\u6570\u636e\u4e0a\u4f20\u5230HDFS\u7684\u6839\u76ee\u5f55\u4e0b\uff0c\u6587\u4ef6\u540d\u4e3a<code>lab5out.txt<\/code>\u3002<\/p>\n<p>\u7f16\u5199\u4ee3\u7801<code>ghbMapReduce.java<\/code><\/p>\n<pre><code class=\"language-java line-numbers\">import java.util.*;\nimport java.io.*;\nimport java.net.*;\nimport kafka.consumer.*;\nimport kafka.javaapi.consumer.ConsumerConnector;\nimport org.apache.hadoop.io.Text;\nimport org.apache.hadoop.io.LongWritable;\nimport org.apache.hadoop.mapreduce.Mapper;\nimport 
org.apache.hadoop.io.IntWritable;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.fs.Path;\nimport org.apache.hadoop.mapreduce.Job;\nimport org.apache.hadoop.mapreduce.Partitioner;\nimport org.apache.hadoop.mapreduce.lib.input.FileInputFormat;\nimport org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;\nimport org.apache.hadoop.util.GenericOptionsParser;\nimport org.apache.hadoop.mapreduce.Mapper;\nimport org.apache.hadoop.mapreduce.Reducer;\nimport org.apache.hadoop.io.WritableComparable;\nimport org.apache.hadoop.fs.FileSystem;\n\npublic class ghbMapReduce {\n\n    final static String INPUT_PATH = \"ghb_lab5_input\";\/\/ \u8f93\u5165\u76ee\u5f55\n    final static String OUTPUT_PATH = \"ghb_lab5_output\";\/\/ \u8f93\u51fa\u76ee\u5f55\n\n    public static void main(String[] args) {\n        \/\/ \u6d88\u8d39\u6570\u636e\n        Consume();\n\n        \/\/ \u6392\u5e8f\n        Sort();\n\n        \/\/ \u4fdd\u5b58\u5230HDFS\n        SaveToHDFS();\n    }\n\n    private static void SaveToHDFS() {\n        try {\n\n            Configuration conf = new Configuration();\n            URI uri = new URI(\"hdfs:\/\/cluster1:9000\");\n            FileSystem fs = FileSystem.get(uri, conf);\n            \/\/ \u672c\u5730\u6587\u4ef6\n            Path src = new Path(OUTPUT_PATH + \"\/part-r-00000\");\n            \/\/ HDFS\u5b58\u653e\u4f4d\u7f6e\n            Path dst = new Path(\"\/lab5out.txt\");\n\n            fs.copyFromLocalFile(src, dst);\n            System.out.println(\"Upload to \" + conf.get(\"fs.defaultFS\"));\n        } catch (Exception e) {\n            e.printStackTrace();\n        }\n    }\n\n    private static void Sort() {\n\n        try {\n            Runtime.getRuntime().exec(\"rm -rf \" + OUTPUT_PATH);\/\/ \u5220\u9664\u4e0a\u6b21\u7684\u8f93\u51fa\u76ee\u5f55\n            Configuration conf = new Configuration();\n            Job job = Job.getInstance(conf, \"ghbMapReduce\");\n            job.setJarByClass(ghbMapReduce.class);\n  
          \/\/ job.setJar(\"sortTest.jar\");\n\n            job.setMapperClass(sortMapper.class);\n            job.setMapOutputKeyClass(Text.class);\n            job.setMapOutputValueClass(Text.class);\n\n            job.setReducerClass(sortReducer.class);\n            job.setOutputKeyClass(Text.class);\n            job.setOutputValueClass(Text.class);\n\n            FileInputFormat.addInputPath(job, new Path(INPUT_PATH));\n            FileOutputFormat.setOutputPath(job, new Path(OUTPUT_PATH));\n\n            job.waitForCompletion(true);\n            \/\/ System.exit(job.waitForCompletion(true) ? 0 : 1);\n        } catch (Exception e) {\n            e.printStackTrace();\n        }\n    }\n\n    public static class sortMapper extends Mapper&lt;Object, Text, Text, Text&gt; {\n        protected void map(Object key, Text value, Context context) throws IOException, InterruptedException {\n            \/\/ \u6846\u67b6\u9ed8\u8ba4\u662f\u6839\u636e\u952ekey\u8fdb\u884c\u6392\u5e8f\uff0c\u6240\u4ee5\u5148\u628a\u6570\u5b57\u8f6c\u79fb\u5230key\u4e0a\u9762\n            context.write(value, new Text(\"\"));\n        }\n    }\n\n    public static class sortReducer extends Reducer&lt;Text, Text, Text, Text&gt; {\n        protected void reduce(Text key, Iterable&lt;Text&gt; values, Context context)\n                throws IOException, InterruptedException {\n            \/\/ \u5728key\u524d\u9762\u518d\u52a0\u4e0a\u4e00\u4e2a\u8868\u793a\u4f4d\u7f6e\u7684\u5e8f\u53f7\n            \/\/ context.write(new Text(index + \" \" + key.toString()), new Text(\"\"));\n            context.write(key, new Text(\"\"));\n        }\n    }\n\n    private static void Consume() {\n        try {\n            Scanner in = new Scanner(System.in);\n            String topic, path;\n            System.out.print(\"\u8bf7\u8f93\u5165topic\u540d\u79f0\uff1a\");\n            topic = in.next();\n            System.out.print(\"\u8bf7\u8f93\u5165\u6d88\u8d39\u6570\u91cf\uff1a\");\n            int num = 
in.nextInt();\n            System.out.print(\"\u4fdd\u5b58\u81f3\u6587\u4ef6\uff1a\");\n            path = in.next();\n            path = INPUT_PATH + \"\/\" + path;\n            ConsumerConnector consumer = Consumer.createJavaConsumerConnector(createConsumerConfig());\n            Map&lt;String, Integer&gt; topicCountMap = new HashMap&lt;String, Integer&gt;();\n            topicCountMap.put(topic, new Integer(1));\n            Map&lt;String, List&lt;KafkaStream&lt;byte[], byte[]&gt;&gt;&gt; consumerMap = consumer.createMessageStreams(topicCountMap);\n            KafkaStream&lt;byte[], byte[]&gt; stream = consumerMap.get(topic).get(0);\n            ConsumerIterator&lt;byte[], byte[]&gt; it = stream.iterator();\n            int i = 1;\n            BufferedWriter bw = new BufferedWriter(new FileWriter(path));\n            while (i &lt;= num) {\n                String out = new String(it.next().message());\n                bw.write(out + '\\n');\n                System.out.println(\"\u6d88\u8d39\u7b2c\" + i + \"\u884c\u6570\u636e    \" + out);\n                i++;\n            }\n            bw.close();\n        } catch (Exception e) {\n            e.printStackTrace();\n        }\n    }\n\n    private static ConsumerConfig createConsumerConfig() {\n        Properties props = new Properties();\n        props.put(\"group.id\", \"group1\");\n        props.put(\"zookeeper.connect\", \"cluster1:2181,cluster2:2181,cluster3:2181\");\n        props.put(\"zookeeper.session.timeout.ms\", \"400\");\n        props.put(\"zookeeper.sync.time.ms\", \"200\");\n        props.put(\"auto.commit.interval.ms\", \"1000\");\n        return new ConsumerConfig(props);\n    }\n\n}\n<\/code><\/pre>\n<p>\u6253\u5f00WinSCP\uff0c\u5c06<code>ghbMapReduce.java<\/code>\u4e0a\u4f20\u81f3\u865a\u62df\u673acluster2\u7684\/home\/hadoop\u8def\u5f84\u4e0b\u3002<\/p>\n<p>cluster2\u4e0a\u6267\u884c<\/p>\n<pre><code class=\"language-cpp line-numbers\">cd \/home\/hadoop\n\/\/ 
\u521b\u5efa\u6587\u4ef6\u5939\nmkdir ghb_lab5_input\n\n\/\/ \u6784\u9020\u65b0\u7684\u547d\u4ee4ghb_javac\uff0c\u6ce8\u610f\u4e0b\u9762\u662f\u4e00\u884c\uff0c\u4e0d\u8981\u5199\u6210\u591a\u884c\nalias ghb_javac=\"javac -cp \/usr\/local\/hadoop-2.6.5\/share\/hadoop\/common\/*:\/usr\/local\/hadoop-2.6.5\/share\/hadoop\/common\/lib\/*:\/usr\/local\/hadoop-2.6.5\/share\/hadoop\/hdfs\/lib\/*:\/usr\/local\/hadoop-2.6.5\/share\/hadoop\/hdfs\/*:\/usr\/local\/hadoop-2.6.5\/share\/hadoop\/mapreduce\/*:\/usr\/local\/hadoop-2.6.5\/share\/hadoop\/mapreduce\/lib\/*:\/usr\/local\/hadoop-2.6.5\/share\/hadoop\/yarn\/lib\/*:\/usr\/local\/hadoop-2.6.5\/share\/hadoop\/yarn\/*:\/usr\/local\/kafka_2.10-0.8.2.1\/libs\/*:\"\n\n\/\/ \u6784\u9020\u65b0\u7684\u547d\u4ee4ghb_java\uff0c\u6ce8\u610f\u4e0b\u9762\u662f\u4e00\u884c\uff0c\u4e0d\u8981\u5199\u6210\u591a\u884c\nalias ghb_java=\"java -cp \/usr\/local\/hadoop-2.6.5\/share\/hadoop\/common\/*:\/usr\/local\/hadoop-2.6.5\/share\/hadoop\/common\/lib\/*:\/usr\/local\/hadoop-2.6.5\/share\/hadoop\/hdfs\/lib\/*:\/usr\/local\/hadoop-2.6.5\/share\/hadoop\/hdfs\/*:\/usr\/local\/hadoop-2.6.5\/share\/hadoop\/mapreduce\/*:\/usr\/local\/hadoop-2.6.5\/share\/hadoop\/mapreduce\/lib\/*:\/usr\/local\/hadoop-2.6.5\/share\/hadoop\/yarn\/lib\/*:\/usr\/local\/hadoop-2.6.5\/share\/hadoop\/yarn\/*:\/usr\/local\/kafka_2.10-0.8.2.1\/libs\/*:\"\n\n\/\/ \u7f16\u8bd1\nghb_javac ghbMapReduce.java\n\/\/ \u8fd0\u884c\nghb_java ghbMapReduce\n<\/code><\/pre>\n<p>\u8f93\u5165topic\u540d\u79f0\u3001\u6d88\u8d39\u6570\u91cf\u3001\u4fdd\u5b58\u6587\u4ef6\u540d\u7b49\u4fe1\u606f\u3002<\/p>\n<p><img decoding=\"async\" src=\"http:\/\/www.guanhaobo.cn\/wp-content\/uploads\/2020\/06\/1003-1.jpg\" alt=\"\" \/><\/p>\n<h3>8 \u67e5\u770b\u8fd0\u884c\u7ed3\u679c<\/h3>\n<h4>8.1 \u67e5\u770b\u4eceKafka\u8bfb\u53d6\u7684\u6570\u636e<\/h4>\n<p>cluster2\u4e0a\u6267\u884c<\/p>\n<pre><code class=\"language-cpp line-numbers\">cd ghb_lab5_input\nls\ncat ghb123.txt\n<\/code><\/pre>\n<p><img 
decoding=\"async\" src=\"http:\/\/www.guanhaobo.cn\/wp-content\/uploads\/2020\/06\/1004-1.jpg\" alt=\"\" \/><\/p>\n<h4>8.2 \u67e5\u770b\u6392\u5e8f\u5904\u7406\u540e\u7684\u6570\u636e<\/h4>\n<p>cluster2\u4e0a\u6267\u884c<\/p>\n<pre><code class=\"language-cpp line-numbers\">cd ..\ncd ghb_lab5_output\nls\ncat part-r-00000\n<\/code><\/pre>\n<p><img decoding=\"async\" src=\"http:\/\/www.guanhaobo.cn\/wp-content\/uploads\/2020\/06\/1005-1.jpg\" alt=\"\" \/><\/p>\n<p><img decoding=\"async\" src=\"http:\/\/www.guanhaobo.cn\/wp-content\/uploads\/2020\/06\/1006-1.jpg\" alt=\"\" \/><\/p>\n<h4>8.3 \u67e5\u770b\u4e0a\u4f20\u5230 HDFS \u7684\u6587\u4ef6<\/h4>\n<p>cluster1 \u6216 cluster3\u4e0a\u6267\u884c<\/p>\n<pre><code class=\"language-cpp line-numbers\">\/\/ \u67e5\u770bhdfs\u6839\u76ee\u5f55\nhdfs dfs -ls \/\n\/\/ \u5c06lab5out.txt\u4e0b\u8f7d\u5230\u672c\u5730\nhdfs dfs -get \/lab5out.txt\nls\ncat lab5out.txt\n<\/code><\/pre>\n<p><img decoding=\"async\" src=\"http:\/\/www.guanhaobo.cn\/wp-content\/uploads\/2020\/06\/1007-1.jpg\" alt=\"\" \/><\/p>\n<h3>9 \u5173\u673a\u524d\u64cd\u4f5c<\/h3>\n<pre><code class=\"language-cpp line-numbers\">\/\/\u505c\u6b62kafka\uff08\u4e09\u53f0\uff09\nkafka-server-stop.sh\n\/\/\u505c\u6b62zookeeper\uff08\u4e09\u53f0\uff09\nzkServer.sh stop\n\/\/ \u5173\u95ed HDFS\uff08cluster1 \u4e0a\uff09\nstop-dfs.sh\n\/\/ \u5173\u95ed YARN\uff08cluster1 \u4e0a\uff09\nstop-yarn.sh\n<\/code><\/pre>\n","protected":false},"excerpt":{"rendered":"<p>1 \u5b9e\u9a8c\u76ee\u7684 \u4e86\u89e3Kafka\u3001HDFS\u3001MapReduce\u5728Hadoop\u4f53\u7cfb\u7ed3\u6784\u4e2d\u7684\u89d2\u8272\uff0c\u5e76\u901a\u8fc7\u672c\u6b21\u7efc\u5408\u5b9e\u9a8c\u5bf9 
[&hellip;]<\/p>\n","protected":false},"author":2,"featured_media":0,"comment_status":"open","ping_status":"open","sticky":false,"template":"","format":"standard","meta":{"footnotes":""},"categories":[6],"tags":[44],"class_list":["post-632","post","type-post","status-publish","format-standard","hentry","category-homework","tag-44"],"_links":{"self":[{"href":"https:\/\/www.guanhaobo.cn\/index.php?rest_route=\/wp\/v2\/posts\/632","targetHints":{"allow":["GET"]}}],"collection":[{"href":"https:\/\/www.guanhaobo.cn\/index.php?rest_route=\/wp\/v2\/posts"}],"about":[{"href":"https:\/\/www.guanhaobo.cn\/index.php?rest_route=\/wp\/v2\/types\/post"}],"author":[{"embeddable":true,"href":"https:\/\/www.guanhaobo.cn\/index.php?rest_route=\/wp\/v2\/users\/2"}],"replies":[{"embeddable":true,"href":"https:\/\/www.guanhaobo.cn\/index.php?rest_route=%2Fwp%2Fv2%2Fcomments&post=632"}],"version-history":[{"count":0,"href":"https:\/\/www.guanhaobo.cn\/index.php?rest_route=\/wp\/v2\/posts\/632\/revisions"}],"wp:attachment":[{"href":"https:\/\/www.guanhaobo.cn\/index.php?rest_route=%2Fwp%2Fv2%2Fmedia&parent=632"}],"wp:term":[{"taxonomy":"category","embeddable":true,"href":"https:\/\/www.guanhaobo.cn\/index.php?rest_route=%2Fwp%2Fv2%2Fcategories&post=632"},{"taxonomy":"post_tag","embeddable":true,"href":"https:\/\/www.guanhaobo.cn\/index.php?rest_route=%2Fwp%2Fv2%2Ftags&post=632"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}