extends Reference

signal download_done(url)
signal download_failed(url)

func download(url):
    if fileExists(urlToFilePath(url)):
        emit_signal("download_done", url)
        return

    var use_ssl = false
    if "https" in url:
        use_ssl = true

    var splitUrl = url.split("/")
    var host = splitUrl[2]
    splitUrl.remove(0)
    splitUrl.remove(0)
    splitUrl.remove(0)
    var uri = "/" + splitUrl.join("/")

    var err = 0
    var http = HTTPClient.new() # Create the Client.

    err = http.connect_to_host(host, -1, use_ssl) # Connect to host/port.
    assert(err == OK) # Make sure connection was OK.

    # Wait until resolved and connected.
    while http.get_status() == HTTPClient.STATUS_CONNECTING or http.get_status() == HTTPClient.STATUS_RESOLVING:
        http.poll()
        print("Connecting...")
        OS.delay_msec(500)

    assert(http.get_status() == HTTPClient.STATUS_CONNECTED) # Could not connect.

    # Some headers.
    var headers = [
        "User-Agent: Pirulo/1.0 (Godot)",
        "Accept: */*"
    ]

    err = http.request(HTTPClient.METHOD_GET, uri, headers) # Request a page from the site (this one was chunked..)
    assert(err == OK) # Make sure all is OK.

    while http.get_status() == HTTPClient.STATUS_REQUESTING:
        # Keep polling for as long as the request is being processed.
        http.poll()
        print("Requesting...")
        if not OS.has_feature("web"):
            OS.delay_msec(500)
        else:
            # Synchronous HTTP requests are not supported on the web,
            # so wait for the next main loop iteration.
            yield(Engine.get_main_loop(), "idle_frame")

    assert(http.get_status() == HTTPClient.STATUS_BODY or http.get_status() == HTTPClient.STATUS_CONNECTED) # Make sure request finished well.

    print("response? ", http.has_response()) # Site might not have a response.

    if http.has_response():
        # If there is a response...
        headers = http.get_response_headers_as_dictionary() # Get response headers.
        print("code: ", http.get_response_code()) # Show response code.
        print("**headers:\\n", headers) # Show headers.

        # Getting the HTTP Body.
        if http.is_response_chunked():
            # Does it use chunks?
            print("Response is Chunked!")
        else:
            # Or just plain Content-Length.
            var bl = http.get_response_body_length()
            print("Response Length: ", bl)

        # This method works for both anyway.
        var rb = PoolByteArray() # Array that will hold the data.

        while http.get_status() == HTTPClient.STATUS_BODY:
            # While there is body left to be read.
            http.poll()
            var chunk = http.read_response_body_chunk() # Get a chunk.
            if chunk.size() == 0:
                # Got nothing, wait for buffers to fill a bit.
                OS.delay_usec(1000)
            else:
                rb = rb + chunk # Append to read buffer.

        # Done!
        var filePath = urlToFilePath(url)
        print("bytes got: ", rb.size())
        save_to_file(filePath, rb)
        print("file \"", filePath, "\" saved")
        http.close()
        emit_signal("download_done", url)
    else:
        # No response at all: close the connection and report the failure.
        http.close()
        emit_signal("download_failed", url)

func save_to_file(filePath, outBytes):
    # Write the downloaded bytes to disk, creating any missing directories first.
    createParentDirs(filePath)
    var dat_out = File.new()
    dat_out.open(filePath, File.WRITE)
    dat_out.store_buffer(outBytes)
    dat_out.close()

func urlToFilePath(queryurl):
    # Map a URL to a cache path under user://, e.g. "https://host/a/b" -> "user://host/a/b".
    var filePath = queryurl.replace("https://", "").replace("http://", "").rstrip("/")
    return "user://" + filePath

func getBaseUrl(queryurl):
    # Strip the last path segment from a URL.
    var baseUrl = queryurl.split("/")
    baseUrl.remove(len(baseUrl) - 1)
    baseUrl = baseUrl.join("/")
    return baseUrl

func fileExists(filePath):
    var file = File.new()
    var err = file.open(filePath, File.READ)
    file.close()
    if err == OK:
        return true
    else:
        return false

func createParentDirs(filePath):
    # Create every missing directory in the path, except the file name itself.
    var splittedFileName = filePath.split("/")
    splittedFileName.remove(0) # Drop "user:".
    splittedFileName.remove(0) # Drop the empty segment left by "//".
    print(splittedFileName)
    var created = ""
    print("creating parent dirs")
    var x = 0
    for dir in splittedFileName:
        if x >= (len(splittedFileName) - 1):
            break
        print("creating: " + "user://" + created + dir)
        var newdir = Directory.new()
        newdir.open("user://" + created)
        if newdir.dir_exists(dir):
            print("user://" + created + dir + " already exists")
            created = created + dir + "/"
            x += 1
            continue
        newdir.make_dir(dir)
        created = created + dir + "/"
        x += 1
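
# Usage sketch (illustrative, not part of the downloader itself). It assumes
# this script is saved as res://downloader.gd; the path, the example URL and
# the callback names are hypothetical. Note that download() polls
# synchronously (OS.delay_msec), so on desktop it blocks the calling thread
# until the transfer finishes; running it from a Thread is one way to keep
# the main loop responsive.
#
#   extends Node
#
#   var Downloader = preload("res://downloader.gd")
#   var downloader = Downloader.new()
#
#   func _ready():
#       downloader.connect("download_done", self, "_on_download_done")
#       downloader.connect("download_failed", self, "_on_download_failed")
#       downloader.download("https://example.com/some/file.png")
#
#   func _on_download_done(url):
#       print("saved to: ", downloader.urlToFilePath(url))
#
#   func _on_download_failed(url):
#       push_error("could not download " + url)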