[wiki] [sites] [copyurl patch] Cleanup URL determination logic || Michael Buch
 
commit 8b1563baffd317eade787e919746a2608e98e2dd
Author: Michael Buch <michaelbuch12_AT_gmail.com>
Date:   Sun May 20 16:39:14 2018 +0100
    [copyurl patch] Cleanup URL determination logic
    
    Separate out into separate function and provide
    ability to add more URLs to the list.
diff --git a/st.suckless.org/patches/copyurl/index.md b/st.suckless.org/patches/copyurl/index.md
index b078e827..0325e8f9 100644
--- a/st.suckless.org/patches/copyurl/index.md
+++ b/st.suckless.org/patches/copyurl/index.md
_AT_@ -25,7 +25,7 @@ Download
 Following patches also highlight the selected urls:
 
  * [st-copyurl-20180514-a7bd977.diff](st-copyurl-20180514-a7bd977.diff)
- * [st-copyurl-20180520-53d59ef.diff](st-copyurl-20180520-53d59ef.diff)
+ * [st-copyurl-20180520-73f375a.diff](st-copyurl-20180520-73f375a.diff)
 
 Authors
 -------
diff --git a/st.suckless.org/patches/copyurl/st-copyurl-20180520-53d59ef.diff b/st.suckless.org/patches/copyurl/st-copyurl-20180520-73f375a.diff
similarity index 78%
rename from st.suckless.org/patches/copyurl/st-copyurl-20180520-53d59ef.diff
rename to st.suckless.org/patches/copyurl/st-copyurl-20180520-73f375a.diff
index 4a2e7e1b..6d2fcd0f 100644
--- a/st.suckless.org/patches/copyurl/st-copyurl-20180520-53d59ef.diff
+++ b/st.suckless.org/patches/copyurl/st-copyurl-20180520-73f375a.diff
_AT_@ -1,16 +1,15 @@
-From 53d59ef83576afface82889e21bf86ed9c288496 Mon Sep 17 00:00:00 2001
+From 73f375a7c3256d32c62bdb7d616207071e89f982 Mon Sep 17 00:00:00 2001
 From: Michael Buch <michaelbuch12_AT_gmail.com>
-Date: Sun, 20 May 2018 01:35:03 +0100
-Subject: [PATCH] Handle multiple URLs on single line scenario
+Date: Sun, 20 May 2018 16:14:14 +0100
+Subject: [PATCH] [copyurl patch] Cleanup URL determination logic
 
-When cycling through URLs, instead of picking the first
-URL of each line, cycle through URLs on a single line
-from back to front
+Separate out into separate function and provide
+ability to add more URLs to the list.
 ---
  config.def.h |   1 +
- st.c         | 116 +++++++++++++++++++++++++++++++++++++++++++++++++++
+ st.c         | 123 +++++++++++++++++++++++++++++++++++++++++++++++++++
  st.h         |   1 +
- 3 files changed, 118 insertions(+)
+ 3 files changed, 125 insertions(+)
 
 diff --git a/config.def.h b/config.def.h
 index 82b1b09..cbe923e 100644
_AT_@ -25,7 +24,7 @@ index 82b1b09..cbe923e 100644
  
  /*
 diff --git a/st.c b/st.c
-index 0628707..309aa89 100644
+index 0628707..b08f454 100644
 --- a/st.c
 +++ b/st.c
 _AT_@ -204,6 +204,7 @@ static void tdefutf8(char);
_AT_@ -54,18 +53,21 @@ index 0628707..309aa89 100644
  void
  csihandle(void)
  {
-_AT_@ -2617,3 +2629,107 @@ redraw(void)
+_AT_@ -2617,3 +2629,114 @@ redraw(void)
          tfulldirt();
          draw();
  }
 +
 +char *
-+findlast(char *str, const char* find)
++findlastany(char *str, const char** find, size_t len)
 +{
 +	char* found = NULL;
++	int i = 0;
 +	for(found = str + strlen(str) - 1; found >= str; --found) {
-+		if(strncmp(found, find, strlen(find)) == 0) {
-+			return found;
++		for(i = 0; i < len; i++) {
++			if(strncmp(found, find[i], strlen(find[i])) == 0) {
++				return found;
++			}
 +		}
 +	}
 +
_AT_@ -87,15 +89,18 @@ index 0628707..309aa89 100644
 +		"abcdefghijklmnopqrstuvwxyz"
 +		"0123456789-._~:/?#_AT_!$&'*+,;=%";
 +
++	static const char* URLSTRINGS[] = {"http://", "https://"};
++
 +	/* remove highlighting from previous selection if any */
 +	if(sel.ob.x >= 0 && sel.oe.x >= 0)
 +		tsetcolor(sel.nb.y, sel.ob.x, sel.oe.x + 1, defaultfg, defaultbg);
 +
 +	int i = 0,
-+		row = 0,
-+		col = 0,
-+		startrow = 0,
-+		colend = 0;
++		row = 0, /* row of current URL */
++		col = 0, /* column of current URL start */
++		startrow = 0, /* row of last occurrence */
++		colend = 0, /* column of last occurrence */
++		passes = 0; /* how many rows have been scanned */
 +
 +	char *linestr = calloc(sizeof(char), term.col+1); /* assume ascii */
 +	char *c = NULL,
_AT_@ -112,7 +117,7 @@ index 0628707..309aa89 100644
 + 	** Scan from (term.bot,term.col) to (0,0) and find
 +	** next occurrence of a URL
 +	*/
-+	do {
++	while(passes !=term.bot + 2) {
 +		/* Read in each column of every row until
 + 		** we hit previous occurrence of URL
 +		*/
_AT_@ -124,15 +129,16 @@ index 0628707..309aa89 100644
 +		}
 +		linestr[term.col] = '
Received on Sun May 20 2018 - 17:42:00 CEST
This archive was generated by hypermail 2.3.0
: Sun May 20 2018 - 17:48:22 CEST