Larbin is a web crawler (also called a (web) robot, spider, or scooter). It is intended to fetch a large number of web pages to fill the database of a search engine. With a fast enough network connection, Larbin should be able to fetch more than 100 million pages on a standard PC.

from Giovanni Bechis <g.bechis@snb.it>, with tweaks by me and ajacoutot@
ok ajacoutot@
$OpenBSD: patch-src_Makefile,v 1.1.1.1 2007/05/07 11:17:07 jasper Exp $

--- src/Makefile.orig	Sat Apr 28 11:02:16 2007
+++ src/Makefile	Sat Apr 28 11:03:17 2007
@@ -17,7 +17,7 @@ clean distclean dep prof debug: options.h config.h
 
 larbin: $(ABS-UTILS-OBJ) $(ABS-FETCH-OBJ) $(ABS-INTERF-OBJ) $(ABS-MAIN-OBJ)
 	$(CXX) $(MF) $(LIBS) -o larbin $(ABS-UTILS-OBJ) \
-		$(ABS-FETCH-OBJ) $(ABS-INTERF-OBJ) $(ABS-MAIN-OBJ) ../adns/libadns.a
+		$(ABS-FETCH-OBJ) $(ABS-INTERF-OBJ) $(ABS-MAIN-OBJ) ${ADNSDIR}/../lib/libadns.a
 
 dep-here:
 	makedepend -f- -I$(BASEDIR) -Y *.cc 2> /dev/null > .depend
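
For context, a minimal sketch of what the patched link rule resolves to. ADNSDIR is supplied by the build environment; the value /usr/local/include below is an assumption for illustration only, not part of the patch. The effect is to link against an installed adns library instead of the bundled ../adns copy.

# Sketch only: ADNSDIR and the /usr/local prefix are assumed values.
# With ADNSDIR=/usr/local/include, ${ADNSDIR}/../lib/libadns.a expands to
# /usr/local/lib/libadns.a, i.e. the system libadns rather than the bundled one.
ADNSDIR ?= /usr/local/include

larbin: $(ABS-UTILS-OBJ) $(ABS-FETCH-OBJ) $(ABS-INTERF-OBJ) $(ABS-MAIN-OBJ)
	$(CXX) $(MF) $(LIBS) -o larbin $(ABS-UTILS-OBJ) \
		$(ABS-FETCH-OBJ) $(ABS-INTERF-OBJ) $(ABS-MAIN-OBJ) ${ADNSDIR}/../lib/libadns.a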