[wiki] [sites] Fix copyurl with non-ASCII character occurrences || Michael Buch

From: <git_AT_suckless.org>
Date: Fri, 29 Jan 2021 23:50:38 +0100

commit d1335e5ce2a14ffd8e3e3161098d2263cccef1a7
Author: Michael Buch <michaelbuch12_AT_gmail.com>
Date: Fri Jan 29 22:47:15 2021 +0000

    Fix copyurl with non-ASCII character occurrences
    
    Users of certain IRC clients (e.g., WeeChat)
    had issues with the URL cycling. This happened
    because of an assumption that we are working
    with ASCII-only text. This patch adds support
    for non-ASCII text.
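
For illustration only (not part of the commit or of the patch below; the
sample string and numbers are made up), a minimal standalone C sketch of
why byte offsets stop matching screen columns as soon as non-ASCII text
appears, which is the ASCII-only assumption the message above refers to:

#include <stdio.h>
#include <string.h>

int
main(void)
{
	/* A line as a terminal might display it: 'é' occupies one screen
	 * column but two bytes in UTF-8. */
	const char *line = "héllo https://st.suckless.org";
	const char *url = strstr(line, "https://");

	/* The byte offset (7) no longer equals the on-screen column (6),
	 * so code that treats byte offsets as columns mis-places the
	 * selection whenever non-ASCII text precedes the URL. */
	printf("byte offset of url:   %td\n", url - line);
	printf("screen column of url: %d\n", 6);
	return 0;
}

The patched copyurl below appears to sidestep this by indexing the
terminal grid per cell (term.line[row][col].u), so selection offsets are
kept in screen columns rather than bytes.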

diff --git a/st.suckless.org/patches/copyurl/index.md b/st.suckless.org/patches/copyurl/index.md
index 0981fa13..4e1586e5 100644
--- a/st.suckless.org/patches/copyurl/index.md
+++ b/st.suckless.org/patches/copyurl/index.md
@@ -21,6 +21,7 @@ Download
 Following patches also highlight the selected urls:
 
 * [st-copyurl-20190202-3be4cf1.diff](st-copyurl-20190202-3be4cf1.diff)
+* [st-copyurl-0.8.4.diff](st-copyurl-0.8.4.diff)
 
 Authors
 -------
diff --git a/st.suckless.org/patches/copyurl/st-copyurl-0.8.4.diff b/st.suckless.org/patches/copyurl/st-copyurl-0.8.4.diff
new file mode 100644
index 00000000..bafe28fd
--- /dev/null
+++ b/st.suckless.org/patches/copyurl/st-copyurl-0.8.4.diff
@@ -0,0 +1,159 @@
+diff -up ../st-0.8.4/config.def.h ./config.def.h
+--- ../st-0.8.4/config.def.h 2020-06-19 10:29:45.000000000 +0100
++++ ./config.def.h 2021-01-29 22:40:56.451916768 +0000
+@@ -199,6 +199,7 @@ static Shortcut shortcuts[] = {
+ { TERMMOD, XK_Y, selpaste, {.i = 0} },
+ { ShiftMask, XK_Insert, selpaste, {.i = 0} },
+ { TERMMOD, XK_Num_Lock, numlock, {.i = 0} },
++ { MODKEY, XK_l, copyurl, {.i = 0} },
+ };
+
+ /*
+diff -up ../st-0.8.4/st.c ./st.c
+--- ../st-0.8.4/st.c 2020-06-19 10:29:45.000000000 +0100
++++ ./st.c 2021-01-29 22:41:18.031954197 +0000
+@@ -200,6 +200,8 @@ static void tdefutf8(char);
+ static int32_t tdefcolor(int *, int *, int);
+ static void tdeftran(char);
+ static void tstrsequence(uchar);
++static void tsetcolor(int, int, int, uint32_t, uint32_t);
++static char * findlastany(char *, const char**, size_t);
+
+ static void drawregion(int, int, int, int);
+
+@@ -2595,3 +2597,122 @@ redraw(void)
+ tfulldirt();
+ draw();
+ }
++
++void
++tsetcolor( int row, int start, int end, uint32_t fg, uint32_t bg )
++{
++ int i = start;
++ for( ; i < end; ++i )
++ {
++ term.line[row][i].fg = fg;
++ term.line[row][i].bg = bg;
++ }
++}
++
++char *
++findlastany(char *str, const char** find, size_t len)
++{
++ char* found = NULL;
++ int i = 0;
++ for(found = str + strlen(str) - 1; found >= str; --found) {
++ for(i = 0; i < len; i++) {
++ if(strncmp(found, find[i], strlen(find[i])) == 0) {
++ return found;
++ }
++ }
++ }
++
++ return NULL;
++}
++
++/*
++** Select and copy the previous url on screen (do nothing if there's no url).
++**
++** FIXME: doesn't handle urls that span multiple lines; will need to add support
++** for multiline "getsel()" first
++*/
++void
++copyurl(const Arg *arg) {
++ /* () and [] can appear in urls, but excluding them here will reduce false
++ * positives when figuring out where a given url ends.
++ */
++ static char URLCHARS[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
++ "abcdefghijklmnopqrstuvwxyz"
++ "0123456789-._~:/?#_AT_!$&'*+,;=%";
++
++ static const char* URLSTRINGS[] = {"http://", "https://"};
++
++ /* remove highlighting from previous selection if any */
++ if(sel.ob.x >= 0 && sel.oe.x >= 0)
++ tsetcolor(sel.nb.y, sel.ob.x, sel.oe.x + 1, defaultfg, defaultbg);
++
++ int i = 0,
++ row = 0, /* row of current URL */
++ col = 0, /* column of current URL start */
++ startrow = 0, /* row of last occurrence */
++ colend = 0, /* column of last occurrence */
++ passes = 0; /* how many rows have been scanned */
++
++ char *linestr = calloc(sizeof(Rune), term.col+1);
++ char *c = NULL,
++ *match = NULL;
++
++ row = (sel.ob.x >= 0 && sel.nb.y > 0) ? sel.nb.y : term.bot;
++ LIMIT(row, term.top, term.bot);
++ startrow = row;
++
++ colend = (sel.ob.x >= 0 && sel.nb.y > 0) ? sel.nb.x : term.col;
++ LIMIT(colend, 0, term.col);
++
++ /*
++ ** Scan from (term.bot,term.col) to (0,0) and find
++ ** next occurrence of a URL
++ */
++ while(passes !=term.bot + 2) {
++ /* Read in each column of every row until
++ ** we hit previous occurrence of URL
++ */
++ for (col = 0, i = 0; col < colend; ++col,++i) {
++ linestr[i] = term.line[row][col].u;
++ }
++ linestr[term.col] = '\0';
++
++ if ((match = findlastany(linestr, URLSTRINGS,
++ sizeof(URLSTRINGS)/sizeof(URLSTRINGS[0]))))
++ break;
++
++ if (--row < term.top)
++ row = term.bot;
++
++ colend = term.col;
++ passes++;
++ };
++
++ if (match) {
++ /* must happen before trim */
++ selclear();
++ sel.ob.x = strlen(linestr) - strlen(match);
++
++ /* trim the rest of the line from the url match */
++ for (c = match; *c != '\0'; ++c)
++ if (!strchr(URLCHARS, *c)) {
++ *c = '\0';
++ break;
++ }
++
++ /* highlight selection by inverting terminal colors */
++ tsetcolor(row, sel.ob.x, sel.ob.x + strlen( match ), defaultbg, defaultfg);
++
++ /* select and copy */
++ sel.mode = 1;
++ sel.type = SEL_REGULAR;
++ sel.oe.x = sel.ob.x + strlen(match)-1;
++ sel.ob.y = sel.oe.y = row;
++ selnormalize();
++ tsetdirt(sel.nb.y, sel.ne.y);
++ xsetsel(getsel());
++ xclipcopy();
++ }
++
++ free(linestr);
++}
+Only in .: st-copyurl-0.8.4.diff
+Only in .: st-copyurl-20190202-3be4cf1.diff
+diff -up ../st-0.8.4/st.h ./st.h
+--- ../st-0.8.4/st.h 2020-06-19 10:29:45.000000000 +0100
++++ ./st.h 2021-01-29 22:40:56.451916768 +0000
+@@ -85,6 +85,7 @@ void printscreen(const Arg *);
+ void printsel(const Arg *);
+ void sendbreak(const Arg *);
+ void toggleprinter(const Arg *);
++void copyurl(const Arg *);
+
+ int tattrset(int);
+ void tnew(int, int);