import std/httpclient
import std/htmlparser
import std/xmltree
import std/strtabs
import std/os
import scrap
import std/json
import std/strutils
import std/tables
import std/macros
import std/enumerate

# Shared HTTP client and raw-page buffer for the whole script.
var client = newHttpClient()
var html: string

# Traversal structure of the JSON walker below:
#
# main_loop <------------
#     |          |      |
# normal_loop array_loop |
# (var node)  (var array[])
#     |          |
#     ----->-------------

proc dbg_dl(level: int) =
  ## Emit indentation proportional to `level` plus a [do_loop] tag
  ## so the recursion depth is visible in the trace output.
  for a in 0 .. level:
    stdout.write " "
  stdout.write level
  stdout.write("[do_loop]")

proc dbg_ml(level: int) =
  ## Same as dbg_dl but tags output produced by main_loop.
  for a in 0 .. level:
    stdout.write " "
  stdout.write level
  stdout.write("[main_loop]")

proc dbg_al(level: int) =
  ## Same as dbg_dl but tags output produced by do_array_loop.
  for a in 0 .. level:
    stdout.write " "
  stdout.write level
  stdout.write("[array_loop]")

# Forward declaration: do_loop/do_array_loop and main_loop are mutually
# recursive.
proc main_loop[T](node: T, level: int, searchString: string = "", path: string = "")

proc do_loop[T](node: T, level: int, searchString: string = "", path: string = "", key: string) =
  ## Walk the fields of a JObject `node`. For every key that matches
  ## `searchString` (or always, when the filter is empty), print the
  ## accumulated `path` and recurse via main_loop with the key appended.
  if node.kind == JObject:
    for y in node.keys:
      if searchString == "" or path.contains(searchString) or y.contains(searchString):
        dbg_dl(level)
        stdout.write path & "][" & key & "][" & y, " [", node[y].kind, "] {", key, "}"
        # canonical enum spelling is JInt (original relied on Nim's
        # style-insensitivity with `Jint`)
        if node[y].kind == JInt:
          stdout.write "(val: ", node[y], ")\n"
        else:
          stdout.write "\n"
        main_loop(node[y], level + 1, searchString, path & "][" & key & "][" & y)

proc do_array_loop(node: JsonNode, level: int, searchString: string = "", path: string = "", key: string = "") =
  ## Walk the elements of a JArray `node`, printing each element's kind
  ## and index, then recurse into the element via main_loop.
  for i, b in enumerate(node.getElems):
    # b.getStr() yields "" for non-string elements, so the filter only
    # matches string elements (or the path) — intentional best-effort.
    if searchString == "" or path.contains(searchString) or b.getStr().contains(searchString):
      dbg_al(level)
      echo "node : ", "array_node_len: ", b.len(), " ,subkind: ", b.kind, " mainkind: ", node.kind, "seq[", i, "]", " path:", path, " key:", key
      main_loop(b, level + 1, searchString, path & "][" & key)

proc main_loop[T](node: T, level: int, searchString: string = "", path: string = "") =
  ## Entry point of the recursive JSON dump. Dispatches objects to
  ## do_loop and arrays to do_array_loop; leaves (len == 0) terminate
  ## the recursion.
  if node.len() > 0:
    if node.kind == JObject:
      for i, b in enumerate(node.pairs):
        if searchString == "" or path.contains(searchString) or b.key.contains(searchString):
          dbg_ml(level)
          echo "element:seq[", i, "]", b, " path: ", path
      for a in node.keys:
        if node[a].kind == JObject:
          do_loop(node[a], level, searchString, path, a)
        elif node[a].kind == JArray:
          do_array_loop(node[a], level + 1, searchString, path, a)
    elif node.kind == JArray:
      do_array_loop(node, level + 1, searchString, path)

echo "URL:"
var url: string = "https://www.comparis.ch/immobilien/marktplatz/lenzburg/mieten?page=2" #readLine(stdin)
echo "given url is: ", url
try:
  html = client.getContent(url)
  let node = parseHtml(html)
  var entry: Entry
  add(entry.desc, Descriptor(name: "test", html_context_tag: "div", html_context_attrs: "class", html_context_key: "css-19re50j", html_tag: "p", aattrs: "class", attrs_key: "css-svet6u"))
  # comparis stores part of the data in a script tag as a json-string, so we get that part:
  add(entry.desc, Descriptor(name: "script-json", html_context_tag: "script", html_context_attrs: "type", html_context_key: r"application/json", html_tag: "", aattrs: "", attrs_key: ""))
  echo("###########")
  discard entry.getEntryFromHtml(node)
  #echo entry.desc[1].content[0]
  # FIX: the original `replaceWord(..., sub = r"\"", by = r""")` did not
  # lex (r""" opens an unterminated triple-quoted raw string), and
  # replaceWord only substitutes at word boundaries. Unescaping the
  # embedded quotes is a plain substring replacement.
  let jsonString = replace(entry.desc[1].content[0], "\\\"", "\"")
  let jsonNode = parseJson(jsonString)
  #echo "jsonString: ", jsonString
  #echo "JSON Node:", jsonNode
  main_loop(jsonNode, 0, searchString = "resultItems")
  #-props-pageProps-initialResultData-resultItems
  echo "node fields: ", jsonNode["props"]["pageProps"]["initialResultData"]["resultItems"][1]["Title"]
  echo "node fields: ", jsonNode["props"]["pageProps"]["initialResultData"]["adIds"][0]
finally:
  # Always release the HTTP client, even when the fetch or parse fails.
  client.close()