author = {Nagaraja, Shishir and Mittal, Prateek and Hong, Chi-Yao and Caesar, Matthew and Borisov, Nikita},
title = {BotGrep: Finding P2P Bots with Structured Graph Analysis},
year = {2010},
isbn = {8887666655554},
publisher = {USENIX Association},
address = {USA},
abstract = {A key feature that distinguishes modern botnets from earlier counterparts is their increasing use of structured overlay topologies. This lets them carry out sophisticated coordinated activities while being resilient to churn, but it can also be used as a point of detection. In this work, we devise techniques to localize botnet members based on the unique communication patterns arising from their overlay topologies used for command and control. Experimental results on synthetic topologies embedded within Internet traffic traces from an ISP's backbone network indicate that our techniques (i) can localize the majority of bots with low false positive rate, and (ii) are resilient to incomplete visibility arising from partial deployment of monitoring systems and measurement inaccuracies from dynamics of background traffic.},
booktitle = {Proceedings of the 19th USENIX Conference on Security},
pages = {7},
numpages = {1},
location = {Washington, DC},
series = {USENIX Security'10},
}
@inproceedings{bib:botminer2008,
  author    = {Gu, Guofei and Perdisci, Roberto and Zhang, Junjie and Lee, Wenke},
  title     = {{BotMiner}: Clustering Analysis of Network Traffic for Protocol- and Structure-Independent Botnet Detection},
  booktitle = {Proceedings of the 17th Conference on Security Symposium},
  year      = {2008},
  publisher = {USENIX Association},
  address   = {USA},
  abstract  = {Botnets are now the key platform for many Internet attacks, such as spam, distributed denial-of-service (DDoS), identity theft, and phishing. Most of the current botnet detection approaches work only on specific botnet command and control (C\&C) protocols (e.g., IRC) and structures (e.g., centralized), and can become ineffective as botnets change their C\&C techniques. In this paper, we present a general detection framework that is independent of botnet C\&C protocol and structure, and requires no a priori knowledge of botnets (such as captured bot binaries and hence the botnet signatures, and C\&C server names/addresses). We start from the definition and essential properties of botnets. We define a botnet as a coordinated group of malware instances that are controlled via C\&C communication channels. The essential properties of a botnet are that the bots communicate with some C\&C servers/peers, perform malicious activities, and do so in a similar or correlated way. Accordingly, our detection framework clusters similar communication traffic and similar malicious traffic, and performs cross cluster correlation to identify the hosts that share both similar communication patterns and similar malicious activity patterns. These hosts are thus bots in the monitored network. We have implemented our BotMiner prototype system and evaluated it using many real network traces. The results show that it can detect real-world botnets (IRC-based, HTTP-based, and P2P botnets including Nugache and Storm worm), and has a very low false positive rate.},
}

@inproceedings{bib:boobytrap2016,
  author     = {Karuppayah, Shankar and Vasilomanolakis, Emmanouil and Haas, Steffen and Mühlhäuser, Max and Fischer, Mathias},
  title      = {{BoobyTrap}: On Autonomously Detecting and Characterizing Crawlers in {P2P} Botnets},
  booktitle  = {2016 {IEEE} International Conference on Communications ({ICC})},
  eventtitle = {{ICC} 2016 - 2016 {IEEE} International Conference on Communications},
  date       = {2016-05},
  pages      = {1--7},
  publisher  = {{IEEE}},
  urldate    = {2021-11-12},
  file       = {Karuppayah et al. - 2016 - BoobyTrap On autonomously detecting and character.pdf:/home/me/Zotero/storage/UAUH5ZAN/Karuppayah et al. - 2016 - BoobyTrap On autonomously detecting and character.pdf:application/pdf},
}

@inproceedings{bib:sensorbuster2017,
  author     = {Karuppayah, Shankar and Böck, Leon and Grube, Tim and Manickam, Selvakumar and Mühlhäuser, Max and Fischer, Mathias},
  title      = {{SensorBuster}: On Identifying Sensor Nodes in {P2P} Botnets},
  shorttitle = {{SensorBuster}},
  booktitle  = {Proceedings of the 12th International Conference on Availability, Reliability and Security},
  date       = {2017-08-29},
  pages      = {1--6},
  publisher  = {Association for Computing Machinery},
  location   = {New York, NY, USA},
  doi        = {10.1145/3098954.3098991},
  urldate    = {2021-03-23},
  abstract   = {The ever-growing number of cyber attacks originating from botnets has made them one of the biggest threat to the Internet ecosystem. Especially P2P-based botnets like ZeroAccess and Sality require special attention as they have been proven to be very resilient against takedown attempts. To identify weaknesses and to prepare takedowns more carefully it is thus a necessity to monitor them by crawling and deploying sensor nodes. This in turn provokes botmasters to come up with monitoring countermeasures to protect their assets. Most existing anti-monitoring countermeasures focus mainly on the detection of crawlers and not on the detection of sensors deployed in a botnet. In this paper, we propose two sensor detection mechanisms called SensorRanker and SensorBuster. We evaluate these mechanisms in two real world botnets, Sality and ZeroAccess. Our results indicate that SensorRanker and SensorBuster are able to detect up to 17 sensors deployed in Sality and four within ZeroAccess.},
  file       = {/home/me/Zotero/storage/ZDUFTXYY/Karuppayah et al. - 2017 - SensorBuster On Identifying Sensor Nodes in P2P B.pdf},
}

@techreport{bib:pagerank1999,
  author      = {Page, Lawrence and Brin, Sergey and Motwani, Rajeev and Winograd, Terry},
  title       = {The {PageRank} Citation Ranking: Bringing Order to the Web},
  institution = {Stanford InfoLab},
  number      = {1999-66},
  year        = {1999},
  abstract    = {The importance of a Web page is an inherently subjective matter, which depends on the readers interests, knowledge and attitudes. But there is still much that can be said objectively about the relative importance of Web pages. This paper describes PageRank, a method for rating Web pages objectively and mechanically, effectively measuring the human interest and attention devoted to them. We compare PageRank to an idealized random Web surfer. We show how to efficiently compute PageRank for large numbers of pages. And, we show how to apply PageRank to search and to user navigation.},
}

@inproceedings{bib:p2pwned2013,
  author     = {Rossow, Christian and Andriesse, Dennis and Werner, Tillmann and Stone-Gross, Brett and Plohmann, Daniel and Dietrich, Christian J. and Bos, Herbert},
  title      = {{SoK}: {P2PWNED} - Modeling and Evaluating the Resilience of Peer-to-Peer Botnets},
  booktitle  = {2013 {IEEE} Symposium on Security and Privacy},
  eventtitle = {2013 {IEEE} Symposium on Security and Privacy ({SP})},
  date       = {2013-05},
  pages      = {97--111},
  publisher  = {{IEEE}},
  urldate    = {2022-03-15},
  file       = {Submitted Version:/home/me/Zotero/storage/7T8RDXXF/Rossow et al. - 2013 - SoK P2PWNED - Modeling and Evaluating the Resilie.pdf:application/pdf},
}
@inproceedings{bib:antonakakis_dga_2012,
author = {Manos Antonakakis and Roberto Perdisci and Yacin Nadji and Nikolaos Vasiloglou and Saeed Abu-Nimeh and Wenke Lee and David Dagon},
title = {From {Throw-Away} Traffic to Bots: Detecting the Rise of {DGA-Based} Malware},
abstract = {Computer criminals regularly construct large distributed attack networks comprised of many thousands of compromised computers around the globe. Once constituted, these attack networks are used to perform computer crimes, creating yet other sets of victims of secondary computer crimes, such as denial of service attacks, spam delivery, theft of personal and financial information for performing fraud, exfiltration of proprietary information for competitive advantage (industrial espionage), etc.The arms race between criminal actors who create and operate botnets and the computer security industry and research community who are actively trying to take these botnets down is escalating in aggressiveness. As the sophistication level of botnet engineering and operations increases, so does the demand on reverse engineering, understanding weaknesses in design that can be exploited on the defensive (or counter-offensive) side, and the possibility that actions to take down or eradicate the botnet may cause unintended consequences.},
booktitle = {Proceedings of the 5th USENIX Conference on Large-Scale Exploits and Emergent Threats},